Mirror of https://github.com/thegeeklab/ansible-doctor.git (synced 2024-11-14 01:00:40 +00:00)
Compare commits
No commits in common. "main" and "v1.2.0" have entirely different histories.
.chglog/CHANGELOG.tpl.md (executable file, 23 lines changed)
@@ -0,0 +1,23 @@
# Changelog

{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})

{{ range .CommitGroups -}}
### {{ .Title }}

{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ (regexReplaceAll "(.*)/issues/(.*)" (regexReplaceAll "(Co-\\w*-by.*)" .Subject "") "${1}/pull/${2}") | trim }}
{{ end }}
{{- end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}

{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
.chglog/config.yml (executable file, 25 lines changed)
@@ -0,0 +1,25 @@
style: github
template: CHANGELOG.tpl.md
info:
  title: CHANGELOG
  repository_url: https://github.com/thegeeklab/ansible-doctor
options:
  commit_groups:
    title_maps:
      feat: Features
      fix: Bug Fixes
      perf: Performance Improvements
      refactor: Code Refactoring
      chore: Others
      test: Testing
      ci: CI Pipeline
      docs: Documentation
  header:
    pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
    pattern_maps:
      - Type
      - Scope
      - Subject
  notes:
    keywords:
      - BREAKING CHANGE
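The header.pattern above is the regular expression git-chglog applies to each commit subject, and pattern_maps assigns its capture groups to the Type, Scope and Subject fields used by the template. A minimal sketch of that split in plain Python, using the same pattern (the sample commit subject is made up for illustration, not taken from the repository history):

import re

# Pattern copied from .chglog/config.yml: type, optional (scope), then subject.
PATTERN = r"^(\w*)(?:\(([\w\$\.\-\*\s]*)\))?\:\s(.*)$"

# Hypothetical conventional-commit subject.
subject = "fix(parser): handle multiline annotations"

match = re.match(PATTERN, subject)
if match:
    commit_type, scope, description = match.groups()
    print(commit_type)  # -> "fix"     (mapped to Type)
    print(scope)        # -> "parser"  (mapped to Scope)
    print(description)  # -> "handle multiline annotations" (mapped to Subject)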
@@ -8,6 +8,3 @@ SELinux
 xoxys
 ansible-.+
 toc
-GPL-3.0
-(P|p)re-(C|c)ommit
-JSON
.drone.jsonnet (normal file, 493 lines changed)
@@ -0,0 +1,493 @@
local PythonVersion(pyversion='3.7') = {
  name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
  image: 'python:' + pyversion,
  environment: {
    PY_COLORS: 1,
  },
  commands: [
    'pip install poetry poetry-dynamic-versioning -qq',
    'poetry config experimental.new-installer false',
    'poetry install',
    'poetry version',
    'poetry run ansible-doctor --help',
  ],
  depends_on: [
    'fetch',
  ],
};

local PipelineLint = {
  kind: 'pipeline',
  name: 'lint',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'yapf',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry config experimental.new-installer false',
        'poetry install',
        'poetry run yapf -dr ./ansibledoctor',
      ],
    },
    {
      name: 'flake8',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry config experimental.new-installer false',
        'poetry install',
        'poetry run flake8 ./ansibledoctor',
      ],
    },
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineTest = {
  kind: 'pipeline',
  name: 'test',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'fetch',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
      ],
    },
    PythonVersion(pyversion='3.7'),
    PythonVersion(pyversion='3.8'),
    PythonVersion(pyversion='3.9'),
    PythonVersion(pyversion='3.10'),
  ],
  depends_on: [
    'lint',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineSecurity = {
  kind: 'pipeline',
  name: 'security',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'bandit',
      image: 'python:3.10',
      environment: {
        PY_COLORS: 1,
      },
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry config experimental.new-installer false',
        'poetry install',
        'poetry run bandit -r ./ansibledoctor -x ./ansibledoctor/test',
      ],
    },
  ],
  depends_on: [
    'test',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineBuildPackage = {
  kind: 'pipeline',
  name: 'build-package',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      name: 'build',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry build',
      ],
    },
    {
      name: 'checksum',
      image: 'alpine',
      commands: [
        'cd dist/ && sha256sum * > ../sha256sum.txt',
      ],
    },
    {
      name: 'changelog-generate',
      image: 'thegeeklab/git-chglog',
      commands: [
        'git fetch -tq',
        'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
      ],
    },
    {
      name: 'changelog-format',
      image: 'thegeeklab/alpine-tools',
      commands: [
        'prettier CHANGELOG.md',
        'prettier -w CHANGELOG.md',
      ],
    },
    {
      name: 'publish-github',
      image: 'plugins/github-release',
      settings: {
        overwrite: true,
        api_key: { from_secret: 'github_token' },
        files: ['dist/*', 'sha256sum.txt'],
        title: '${DRONE_TAG}',
        note: 'CHANGELOG.md',
      },
      when: {
        ref: ['refs/tags/**'],
      },
    },
    {
      name: 'publish-pypi',
      image: 'python:3.10',
      commands: [
        'git fetch -tq',
        'pip install poetry poetry-dynamic-versioning -qq',
        'poetry publish -n',
      ],
      environment: {
        POETRY_HTTP_BASIC_PYPI_USERNAME: { from_secret: 'pypi_username' },
        POETRY_HTTP_BASIC_PYPI_PASSWORD: { from_secret: 'pypi_password' },
      },
      when: {
        ref: ['refs/tags/**'],
      },
    },
  ],
  depends_on: [
    'security',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineBuildContainer(arch='amd64') = {
  local build = if arch == 'arm' then [{
    name: 'build',
    image: 'python:3.10-alpine',
    commands: [
      'apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo',
      'git fetch -tq',
      'pip install poetry poetry-dynamic-versioning -qq',
      'poetry build',
    ],
    environment: {
      CARGO_NET_GIT_FETCH_WITH_CLI: true,
    },
  }] else [{
    name: 'build',
    image: 'python:3.10',
    commands: [
      'git fetch -tq',
      'pip install poetry poetry-dynamic-versioning -qq',
      'poetry build',
    ],
  }],

  kind: 'pipeline',
  name: 'build-container-' + arch,
  platform: {
    os: 'linux',
    arch: arch,
  },
  steps: build + [
    {
      name: 'dryrun',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        dry_run: true,
        dockerfile: 'docker/Dockerfile.' + arch,
        repo: 'thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
      },
      depends_on: ['build'],
      when: {
        ref: ['refs/pull/**'],
      },
    },
    {
      name: 'publish-dockerhub',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        auto_tag: true,
        auto_tag_suffix: arch,
        dockerfile: 'docker/Dockerfile.' + arch,
        repo: 'thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
      depends_on: ['dryrun'],
    },
    {
      name: 'publish-quay',
      image: 'thegeeklab/drone-docker:19',
      settings: {
        auto_tag: true,
        auto_tag_suffix: arch,
        dockerfile: 'docker/Dockerfile.' + arch,
        registry: 'quay.io',
        repo: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
        username: { from_secret: 'quay_username' },
        password: { from_secret: 'quay_password' },
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
      depends_on: ['dryrun'],
    },
  ],
  depends_on: [
    'security',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineDocs = {
  kind: 'pipeline',
  name: 'docs',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  concurrency: {
    limit: 1,
  },
  steps: [
    {
      name: 'assets',
      image: 'thegeeklab/alpine-tools',
      commands: [
        'make doc',
      ],
    },
    {
      name: 'markdownlint',
      image: 'thegeeklab/markdownlint-cli',
      commands: [
        "markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'",
      ],
    },
    {
      name: 'spellcheck',
      image: 'node:lts-alpine',
      commands: [
        'npm install -g spellchecker-cli',
        "spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions",
      ],
      environment: {
        FORCE_COLOR: true,
        NPM_CONFIG_LOGLEVEL: 'error',
      },
    },
    {
      name: 'testbuild',
      image: 'thegeeklab/hugo:0.91.0',
      commands: [
        'hugo -s docs/ -b http://localhost/',
      ],
    },
    {
      name: 'link-validation',
      image: 'thegeeklab/link-validator',
      commands: [
        'link-validator -ro',
      ],
      environment: {
        LINK_VALIDATOR_BASE_DIR: 'docs/public',
      },
    },
    {
      name: 'build',
      image: 'thegeeklab/hugo:0.91.0',
      commands: [
        'hugo -s docs/',
      ],
    },
    {
      name: 'beautify',
      image: 'node:lts-alpine',
      commands: [
        'npm install -g js-beautify',
        "html-beautify -r -f 'docs/public/**/*.html'",
      ],
      environment: {
        FORCE_COLOR: true,
        NPM_CONFIG_LOGLEVEL: 'error',
      },
    },
    {
      name: 'publish',
      image: 'plugins/s3-sync',
      settings: {
        access_key: { from_secret: 's3_access_key' },
        bucket: 'geekdocs',
        delete: true,
        endpoint: 'https://sp.rknet.org',
        path_style: true,
        secret_key: { from_secret: 's3_secret_access_key' },
        source: 'docs/public/',
        strip_prefix: 'docs/public/',
        target: '/${DRONE_REPO_NAME}',
      },
      when: {
        ref: ['refs/heads/main', 'refs/tags/**'],
      },
    },
  ],
  depends_on: [
    'build-package',
    'build-container-amd64',
    'build-container-arm64',
    'build-container-arm',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
  },
};

local PipelineNotifications = {
  kind: 'pipeline',
  name: 'notifications',
  platform: {
    os: 'linux',
    arch: 'amd64',
  },
  steps: [
    {
      image: 'plugins/manifest',
      name: 'manifest-dockerhub',
      settings: {
        ignore_missing: true,
        auto_tag: true,
        username: { from_secret: 'docker_username' },
        password: { from_secret: 'docker_password' },
        spec: 'docker/manifest.tmpl',
      },
      when: {
        status: ['success'],
      },
    },
    {
      image: 'plugins/manifest',
      name: 'manifest-quay',
      settings: {
        ignore_missing: true,
        auto_tag: true,
        username: { from_secret: 'quay_username' },
        password: { from_secret: 'quay_password' },
        spec: 'docker/manifest-quay.tmpl',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'pushrm-dockerhub',
      pull: 'always',
      image: 'chko/docker-pushrm:1',
      environment: {
        DOCKER_PASS: {
          from_secret: 'docker_password',
        },
        DOCKER_USER: {
          from_secret: 'docker_username',
        },
        PUSHRM_FILE: 'README.md',
        PUSHRM_SHORT: 'Annotation based documentation for your Ansible roles',
        PUSHRM_TARGET: 'thegeeklab/${DRONE_REPO_NAME}',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'pushrm-quay',
      pull: 'always',
      image: 'chko/docker-pushrm:1',
      environment: {
        APIKEY__QUAY_IO: {
          from_secret: 'quay_token',
        },
        PUSHRM_FILE: 'README.md',
        PUSHRM_TARGET: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
      },
      when: {
        status: ['success'],
      },
    },
    {
      name: 'matrix',
      image: 'thegeeklab/drone-matrix',
      settings: {
        homeserver: { from_secret: 'matrix_homeserver' },
        roomid: { from_secret: 'matrix_roomid' },
        template: 'Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}',
        username: { from_secret: 'matrix_username' },
        password: { from_secret: 'matrix_password' },
      },
      when: {
        status: ['success', 'failure'],
      },
    },
  ],
  depends_on: [
    'docs',
  ],
  trigger: {
    ref: ['refs/heads/main', 'refs/tags/**'],
    status: ['success', 'failure'],
  },
};

[
  PipelineLint,
  PipelineTest,
  PipelineSecurity,
  PipelineBuildPackage,
  PipelineBuildContainer(arch='amd64'),
  PipelineBuildContainer(arch='arm64'),
  PipelineBuildContainer(arch='arm'),
  PipelineDocs,
  PipelineNotifications,
]
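The Jsonnet file above is the source from which the .drone.yml shown next is generated; helpers such as PythonVersion() stamp out one test step per interpreter version, and PipelineBuildContainer() does the same per architecture. As an illustration only (the function name and structure here are mine, not part of the repository), the same parameterization idea can be sketched in Python:

# Illustrative Python equivalent of the Jsonnet PythonVersion() helper:
# build one pytest step definition per interpreter version.
def python_version_step(pyversion="3.7"):
    return {
        "name": f"python{pyversion.replace('.', '')}-pytest",
        "image": f"python:{pyversion}",
        "environment": {"PY_COLORS": 1},
        "commands": [
            "pip install poetry poetry-dynamic-versioning -qq",
            "poetry config experimental.new-installer false",
            "poetry install",
            "poetry version",
            "poetry run ansible-doctor --help",
        ],
        "depends_on": ["fetch"],
    }


steps = [python_version_step(v) for v in ("3.7", "3.8", "3.9", "3.10")]
print(steps[0]["name"])  # -> "python37-pytest"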
.drone.yml (normal file, 635 lines changed)
@@ -0,0 +1,635 @@
---
kind: pipeline
name: lint

platform:
  os: linux
  arch: amd64

steps:
  - name: yapf
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry run yapf -dr ./ansibledoctor
    environment:
      PY_COLORS: 1

  - name: flake8
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry run flake8 ./ansibledoctor
    environment:
      PY_COLORS: 1

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

---
kind: pipeline
name: test

platform:
  os: linux
  arch: amd64

steps:
  - name: fetch
    image: python:3.10
    commands:
      - git fetch -tq

  - name: python37-pytest
    image: python:3.7
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry version
      - poetry run ansible-doctor --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python38-pytest
    image: python:3.8
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry version
      - poetry run ansible-doctor --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python39-pytest
    image: python:3.9
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry version
      - poetry run ansible-doctor --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

  - name: python310-pytest
    image: python:3.10
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry version
      - poetry run ansible-doctor --help
    environment:
      PY_COLORS: 1
    depends_on:
      - fetch

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - lint

---
kind: pipeline
name: security

platform:
  os: linux
  arch: amd64

steps:
  - name: bandit
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry config experimental.new-installer false
      - poetry install
      - poetry run bandit -r ./ansibledoctor -x ./ansibledoctor/test
    environment:
      PY_COLORS: 1

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - test

---
kind: pipeline
name: build-package

platform:
  os: linux
  arch: amd64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: checksum
    image: alpine
    commands:
      - cd dist/ && sha256sum * > ../sha256sum.txt

  - name: changelog-generate
    image: thegeeklab/git-chglog
    commands:
      - git fetch -tq
      - git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}

  - name: changelog-format
    image: thegeeklab/alpine-tools
    commands:
      - prettier CHANGELOG.md
      - prettier -w CHANGELOG.md

  - name: publish-github
    image: plugins/github-release
    settings:
      api_key:
        from_secret: github_token
      files:
        - dist/*
        - sha256sum.txt
      note: CHANGELOG.md
      overwrite: true
      title: ${DRONE_TAG}
    when:
      ref:
        - refs/tags/**

  - name: publish-pypi
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry publish -n
    environment:
      POETRY_HTTP_BASIC_PYPI_PASSWORD:
        from_secret: pypi_password
      POETRY_HTTP_BASIC_PYPI_USERNAME:
        from_secret: pypi_username
    when:
      ref:
        - refs/tags/**

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-amd64

platform:
  os: linux
  arch: amd64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.amd64
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: amd64
      dockerfile: docker/Dockerfile.amd64
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: amd64
      dockerfile: docker/Dockerfile.amd64
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-arm64

platform:
  os: linux
  arch: arm64

steps:
  - name: build
    image: python:3.10
    commands:
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.arm64
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm64
      dockerfile: docker/Dockerfile.arm64
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm64
      dockerfile: docker/Dockerfile.arm64
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: build-container-arm

platform:
  os: linux
  arch: arm

steps:
  - name: build
    image: python:3.10-alpine
    commands:
      - apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo
      - git fetch -tq
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build
    environment:
      CARGO_NET_GIT_FETCH_WITH_CLI: true

  - name: dryrun
    image: thegeeklab/drone-docker:19
    settings:
      dockerfile: docker/Dockerfile.arm
      dry_run: true
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/pull/**
    depends_on:
      - build

  - name: publish-dockerhub
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm
      dockerfile: docker/Dockerfile.arm
      password:
        from_secret: docker_password
      repo: thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: docker_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

  - name: publish-quay
    image: thegeeklab/drone-docker:19
    settings:
      auto_tag: true
      auto_tag_suffix: arm
      dockerfile: docker/Dockerfile.arm
      password:
        from_secret: quay_password
      registry: quay.io
      repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
      username:
        from_secret: quay_username
    when:
      ref:
        - refs/heads/main
        - refs/tags/**
    depends_on:
      - dryrun

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - security

---
kind: pipeline
name: docs

platform:
  os: linux
  arch: amd64

concurrency:
  limit: 1

steps:
  - name: assets
    image: thegeeklab/alpine-tools
    commands:
      - make doc

  - name: markdownlint
    image: thegeeklab/markdownlint-cli
    commands:
      - markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'

  - name: spellcheck
    image: node:lts-alpine
    commands:
      - npm install -g spellchecker-cli
      - spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions
    environment:
      FORCE_COLOR: true
      NPM_CONFIG_LOGLEVEL: error

  - name: testbuild
    image: thegeeklab/hugo:0.91.0
    commands:
      - hugo -s docs/ -b http://localhost/

  - name: link-validation
    image: thegeeklab/link-validator
    commands:
      - link-validator -ro
    environment:
      LINK_VALIDATOR_BASE_DIR: docs/public

  - name: build
    image: thegeeklab/hugo:0.91.0
    commands:
      - hugo -s docs/

  - name: beautify
    image: node:lts-alpine
    commands:
      - npm install -g js-beautify
      - html-beautify -r -f 'docs/public/**/*.html'
    environment:
      FORCE_COLOR: true
      NPM_CONFIG_LOGLEVEL: error

  - name: publish
    image: plugins/s3-sync
    settings:
      access_key:
        from_secret: s3_access_key
      bucket: geekdocs
      delete: true
      endpoint: https://sp.rknet.org
      path_style: true
      secret_key:
        from_secret: s3_secret_access_key
      source: docs/public/
      strip_prefix: docs/public/
      target: /${DRONE_REPO_NAME}
    when:
      ref:
        - refs/heads/main
        - refs/tags/**

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
    - refs/pull/**

depends_on:
  - build-package
  - build-container-amd64
  - build-container-arm64
  - build-container-arm

---
kind: pipeline
name: notifications

platform:
  os: linux
  arch: amd64

steps:
  - name: manifest-dockerhub
    image: plugins/manifest
    settings:
      auto_tag: true
      ignore_missing: true
      password:
        from_secret: docker_password
      spec: docker/manifest.tmpl
      username:
        from_secret: docker_username
    when:
      status:
        - success

  - name: manifest-quay
    image: plugins/manifest
    settings:
      auto_tag: true
      ignore_missing: true
      password:
        from_secret: quay_password
      spec: docker/manifest-quay.tmpl
      username:
        from_secret: quay_username
    when:
      status:
        - success

  - name: pushrm-dockerhub
    pull: always
    image: chko/docker-pushrm:1
    environment:
      DOCKER_PASS:
        from_secret: docker_password
      DOCKER_USER:
        from_secret: docker_username
      PUSHRM_FILE: README.md
      PUSHRM_SHORT: Annotation based documentation for your Ansible roles
      PUSHRM_TARGET: thegeeklab/${DRONE_REPO_NAME}
    when:
      status:
        - success

  - name: pushrm-quay
    pull: always
    image: chko/docker-pushrm:1
    environment:
      APIKEY__QUAY_IO:
        from_secret: quay_token
      PUSHRM_FILE: README.md
      PUSHRM_TARGET: quay.io/${DRONE_REPO_NAME}
    when:
      status:
        - success

  - name: matrix
    image: thegeeklab/drone-matrix
    settings:
      homeserver:
        from_secret: matrix_homeserver
      password:
        from_secret: matrix_password
      roomid:
        from_secret: matrix_roomid
      template: "Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}"
      username:
        from_secret: matrix_username
    when:
      status:
        - success
        - failure

trigger:
  ref:
    - refs/heads/main
    - refs/tags/**
  status:
    - success
    - failure

depends_on:
  - docs

---
kind: signature
hmac: ba73c2d1d4b473b6e4cff2bc3d7e045a6ca530e0c1951fd13e798e038e23b554

...
.github/settings.yml (vendored, 9 lines changed)
@@ -52,11 +52,6 @@ branches:
      required_status_checks:
        strict: false
        contexts:
-          - ci/woodpecker/pr/lint
-          - ci/woodpecker/pr/test
-          - ci/woodpecker/pr/build-package
-          - ci/woodpecker/pr/build-container
-          - ci/woodpecker/pr/docs
-      enforce_admins: false
-      required_linear_history: true
+          - continuous-integration/drone/pr
+      enforce_admins: null
      restrictions: null
.gitignore (vendored, 2 lines changed)
@@ -106,8 +106,6 @@ pip-wheel-metadata
 docs/themes/
 docs/public/
 resources/_gen/
-.hugo_build.lock

 # Misc
 CHANGELOG.md
-.ruff_cache
@@ -1,47 +0,0 @@
---
version: "1.1"

versioning:
  update-major: []
  update-minor: [feat]
  update-patch: [fix, perf, refactor, chore, test, ci, docs]

tag:
  pattern: "v%d.%d.%d"

release-notes:
  sections:
    - name: Features
      commit-types: [feat]
      section-type: commits
    - name: Bug Fixes
      commit-types: [fix]
      section-type: commits
    - name: Performance Improvements
      commit-types: [perf]
      section-type: commits
    - name: Code Refactoring
      commit-types: [refactor]
      section-type: commits
    - name: Others
      commit-types: [chore]
      section-type: commits
    - name: Testing
      commit-types: [test]
      section-type: commits
    - name: CI Pipeline
      commit-types: [ci]
      section-type: commits
    - name: Documentation
      commit-types: [docs]
      section-type: commits
    - name: Breaking Changes
      section-type: breaking-changes

commit-message:
  footer:
    issue:
      key: issue
      add-value-prefix: "#"
  issue:
    regex: "#?[0-9]+"
@@ -1 +0,0 @@
https://hub.docker.com/r/thegeeklab/*
@@ -2,9 +2,5 @@
 default: True
 MD013: False
 MD041: False
-MD024: False
 MD004:
   style: dash
-MD033:
-  allowed_elements:
-    - "br"
@@ -1,10 +0,0 @@
---
- id: ansible-doctor
  name: ansible-doctor
  description: Create annotation based documentation for your Ansible roles.
  entry: ansible-doctor -f -qqq
  language: python
  pass_filenames: False
  always_run: True
  additional_dependencies:
    - .[ansible-core]
@@ -1,2 +1,3 @@
+.drone.yml
 *.tpl.md
 LICENSE
@@ -1,82 +0,0 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: build
    image: docker.io/library/python:3.13
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: security-build
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: [build]
    settings:
      containerfile: Containerfile.multiarch
      output: type=oci,dest=oci/${CI_REPO_NAME},tar=false
      repo: ${CI_REPO}

  - name: security-scan
    image: docker.io/aquasec/trivy
    depends_on: [security-build]
    commands:
      - trivy -v
      - trivy image --input oci/${CI_REPO_NAME}
    environment:
      TRIVY_EXIT_CODE: "1"
      TRIVY_IGNORE_UNFIXED: "true"
      TRIVY_NO_PROGRESS: "true"
      TRIVY_SEVERITY: HIGH,CRITICAL
      TRIVY_TIMEOUT: 1m
      TRIVY_DB_REPOSITORY: docker.io/aquasec/trivy-db:2

  - name: publish-dockerhub
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: [security-scan]
    settings:
      auto_tag: true
      containerfile: Containerfile.multiarch
      password:
        from_secret: docker_password
      platforms:
        - linux/amd64
        - linux/arm64
      provenance: false
      repo: ${CI_REPO}
      username:
        from_secret: docker_username
    when:
      - event: [tag]
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}

  - name: publish-quay
    image: quay.io/thegeeklab/wp-docker-buildx:5
    depends_on: security-scan
    settings:
      auto_tag: true
      containerfile: Containerfile.multiarch
      password:
        from_secret: quay_password
      platforms:
        - linux/amd64
        - linux/arm64
      provenance: false
      registry: quay.io
      repo: quay.io/${CI_REPO}
      username:
        from_secret: quay_username
    when:
      - event: [tag]
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}

depends_on:
  - lint
  - test
@@ -1,56 +0,0 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: build
    image: docker.io/library/python:3.13
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry build

  - name: checksum
    image: quay.io/thegeeklab/alpine-tools
    commands:
      - cd dist/ && sha256sum * > ../sha256sum.txt

  - name: changelog
    image: quay.io/thegeeklab/git-sv
    commands:
      - git sv current-version
      - git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
      - cat CHANGELOG.md

  - name: publish-github
    image: docker.io/plugins/github-release
    settings:
      api_key:
        from_secret: github_token
      files:
        - dist/*
        - sha256sum.txt
      note: CHANGELOG.md
      overwrite: true
      title: ${CI_COMMIT_TAG}
    when:
      - event: [tag]

  - name: publish-pypi
    image: docker.io/library/python:3.13
    environment:
      POETRY_HTTP_BASIC_PYPI_PASSWORD:
        from_secret: pypi_password
      POETRY_HTTP_BASIC_PYPI_USERNAME:
        from_secret: pypi_username
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry publish -n
    when:
      - event: [tag]

depends_on:
  - lint
  - test
@@ -1,101 +0,0 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: assets
    image: quay.io/thegeeklab/alpine-tools
    commands:
      - make doc

  - name: markdownlint
    image: quay.io/thegeeklab/markdownlint-cli
    depends_on: [assets]
    commands:
      - markdownlint 'README.md' 'CONTRIBUTING.md'

  - name: spellcheck
    image: quay.io/thegeeklab/alpine-tools
    depends_on: [assets]
    commands:
      - spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
    environment:
      FORCE_COLOR: "true"

  - name: link-validation
    image: docker.io/lycheeverse/lychee
    depends_on: [assets]
    commands:
      - lychee --no-progress --format detailed docs/content README.md

  - name: build
    image: quay.io/thegeeklab/hugo:0.136.5
    depends_on: [link-validation]
    commands:
      - hugo --panicOnWarning -s docs/

  - name: beautify
    image: quay.io/thegeeklab/alpine-tools
    depends_on: [build]
    commands:
      - html-beautify -r -f 'docs/public/**/*.html'

  - name: publish
    image: quay.io/thegeeklab/wp-s3-action
    depends_on: [beautify]
    settings:
      access_key:
        from_secret: s3_access_key
      bucket: geekdocs
      delete: true
      endpoint:
        from_secret: s3_endpoint
      path_style: true
      secret_key:
        from_secret: s3_secret_access_key
      source: docs/public/
      strip_prefix: docs/public/
      target: /${CI_REPO_NAME}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success, failure]

  - name: pushrm-dockerhub
    image: docker.io/chko/docker-pushrm:1
    depends_on: [publish]
    environment:
      DOCKER_PASS:
        from_secret: docker_password
      DOCKER_USER:
        from_secret: docker_username
      PUSHRM_FILE: README.md
      PUSHRM_SHORT: Annotation based documentation for your Ansible roles
      PUSHRM_TARGET: ${CI_REPO}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success]

  - name: pushrm-quay
    image: docker.io/chko/docker-pushrm:1
    depends_on: [publish]
    environment:
      APIKEY__QUAY_IO:
        from_secret: quay_token
      PUSHRM_FILE: README.md
      PUSHRM_TARGET: quay.io/${CI_REPO}
    when:
      - event: [push, manual]
        branch:
          - ${CI_REPO_DEFAULT_BRANCH}
        status: [success]

depends_on:
  - build-package
  - build-container
@@ -1,37 +0,0 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

steps:
  - name: check-format
    image: docker.io/library/python:3.13
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
    environment:
      PY_COLORS: "1"

  - name: check-coding
    image: docker.io/library/python:3.13
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run ruff check ./${CI_REPO_NAME//-/}
    environment:
      PY_COLORS: "1"

  - name: check-jinja
    image: docker.io/library/python:3.13
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry run j2lint ansibledoctor/templates/ -i jinja-statements-indentation jinja-statements-delimiter
    environment:
      PY_COLORS: "1"
@@ -1,26 +0,0 @@
---
when:
  - event: [tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

runs_on: [success, failure]

steps:
  - name: matrix
    image: quay.io/thegeeklab/wp-matrix
    settings:
      homeserver:
        from_secret: matrix_homeserver
      room_id:
        from_secret: matrix_room_id
      user_id:
        from_secret: matrix_user_id
      access_token:
        from_secret: matrix_access_token
    when:
      - status: [success, failure]

depends_on:
  - docs
@@ -1,34 +0,0 @@
---
when:
  - event: [pull_request, tag]
  - event: [push, manual]
    branch:
      - ${CI_REPO_DEFAULT_BRANCH}

variables:
  - &pytest_base
    depends_on: []
    commands:
      - pip install poetry poetry-dynamic-versioning -qq
      - poetry install -E ansible-core
      - poetry version
      - poetry run ${CI_REPO_NAME} --help
    environment:
      PY_COLORS: "1"

steps:
  - name: python-313
    image: docker.io/library/python:3.13
    <<: *pytest_base

  - name: python-312
    image: docker.io/library/python:3.12
    <<: *pytest_base

  - name: python-311
    image: docker.io/library/python:3.11
    <<: *pytest_base

  - name: python-310
    image: docker.io/library/python:3.10
    <<: *pytest_base
@@ -3,7 +3,7 @@
 ## Security
 
 If you think you have found a **security issue**, please do not mention it in this repository.
-Instead, send an email to `security@thegeeklab.de` with as many details as possible so it can be handled confidential.
+Instead, send an email to security@thegeeklab.de with as many details as possible so it can be handled confidential.
 
 ## Bug Reports and Feature Requests
 
Makefile (2 lines changed)
@@ -1,5 +1,5 @@
 # renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
-THEME_VERSION := v1.2.1
+THEME_VERSION := v0.27.2
 THEME := hugo-geekdoc
 BASEDIR := docs
 THEMEDIR := $(BASEDIR)/themes
@@ -2,7 +2,7 @@
 
 Annotation based documentation for your Ansible roles
 
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
+[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
 [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-doctor)
 [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-doctor)
 [![Python Version](https://img.shields.io/pypi/pyversions/ansible-doctor.svg)](https://pypi.org/project/ansible-doctor/)
@@ -22,7 +22,7 @@ The full documentation is available at [https://ansible-doctor.geekdocs.de](http
 
 ## Contributors
 
-Special thanks to all [contributors](https://github.com/thegeeklab/ansible-doctor/graphs/contributors). If you would like to contribute,
+Special thanks goes to all [contributors](https://github.com/thegeeklab/ansible-doctor/graphs/contributors). If you would like to contribute,
 please see the [instructions](https://github.com/thegeeklab/ansible-doctor/blob/main/CONTRIBUTING.md).
 
 ## License
@@ -1,10 +1,3 @@
-"""Provide version information."""
+"""Default package."""
 
 __version__ = "0.0.0"
-
-import sys
-
-try:
-    import ansible  # noqa
-except ImportError:
-    sys.exit("ERROR: Python requirements are missing: 'ansible-core' not found.")
@ -6,10 +6,9 @@ import re
|
|||||||
from collections import defaultdict
|
from collections import defaultdict
|
||||||
|
|
||||||
import anyconfig
|
import anyconfig
|
||||||
import structlog
|
|
||||||
|
|
||||||
from ansibledoctor.config import SingleConfig
|
from ansibledoctor.config import SingleConfig
|
||||||
from ansibledoctor.utils import _split_string, sysexit_with_message
|
from ansibledoctor.utils import SingleLog
|
||||||
|
|
||||||
|
|
||||||
class AnnotationItem:
|
class AnnotationItem:
|
||||||
@ -21,11 +20,9 @@ class AnnotationItem:
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""Beautify object string output."""
|
"""Beautify object string output."""
|
||||||
for key in self.data:
|
for key in self.data.keys():
|
||||||
for sub in self.data.get(key):
|
for sub in self.data.get(key):
|
||||||
return f"AnnotationItem({key}: {sub})"
|
return "AnnotationItem({}: {})".format(key, sub)
|
||||||
|
|
||||||
return "None"
|
|
||||||
|
|
||||||
def get_obj(self):
|
def get_obj(self):
|
||||||
return self.data
|
return self.data
|
||||||
@ -38,12 +35,13 @@ class Annotation:
|
|||||||
self._all_items = defaultdict(dict)
|
self._all_items = defaultdict(dict)
|
||||||
self._file_handler = None
|
self._file_handler = None
|
||||||
self.config = SingleConfig()
|
self.config = SingleConfig()
|
||||||
self.log = structlog.get_logger()
|
self.log = SingleLog()
|
||||||
|
self.logger = self.log.logger
|
||||||
self._files_registry = files_registry
|
self._files_registry = files_registry
|
||||||
|
|
||||||
self._all_annotations = self.config.get_annotations_definition()
|
self._all_annotations = self.config.get_annotations_definition()
|
||||||
|
|
-        if name in self._all_annotations:
+        if name in self._all_annotations.keys():
             self._annotation_definition = self._all_annotations[name]

         if self._annotation_definition is not None:

@@ -55,23 +53,26 @@ class Annotation:
     def _find_annotation(self):
         regex = r"(\#\ *\@" + self._annotation_definition["name"] + r"\ +.*)"
         for rfile in self._files_registry.get_files():
-            with open(rfile, encoding="utf8") as self._file_handler:
-                num = 1
-                while True:
-                    line = self._file_handler.readline()
-                    if not line:
-                        break
-
-                    if re.match(regex, line.strip()):
-                        item = self._get_annotation_data(
-                            num, line, self._annotation_definition["name"], rfile
-                        )
-                        if item:
-                            self.log.info(f"Found {item!s}")
-                            self._populate_item(
-                                item.get_obj().items(), self._annotation_definition["name"]
-                            )
-                    num += 1
+            self._file_handler = open(rfile, encoding="utf8")
+            num = 1
+            while True:
+                line = self._file_handler.readline()
+                if not line:
+                    break
+
+                if re.match(regex, line.strip()):
+                    item = self._get_annotation_data(
+                        num, line, self._annotation_definition["name"], rfile
+                    )
+                    if item:
+                        self.logger.info(str(item))
+                        self._populate_item(
+                            item.get_obj().items(), self._annotation_definition["name"]
+                        )
+                num += 1
+
+            self._file_handler.close()

     def _populate_item(self, item, name):
         allow_multiple = self.config.ANNOTATIONS.get(name)["allow_multiple"]

@@ -85,7 +86,9 @@ class Annotation:
         try:
             anyconfig.merge(self._all_items[key], value, ac_merge=anyconfig.MS_DICTS)
         except ValueError as e:
-            sysexit_with_message("Failed to merge annotation values", error=e)
+            self.log.sysexit_with_message(
+                "Unable to merge annotation values:\n{}".format(e)
+            )

     def _get_annotation_data(self, num, line, name, rfile):
         """

@@ -100,26 +103,26 @@ class Annotation:
         line1 = re.sub(reg1, "", line).strip()

         # step3 take the main key value from the annotation
-        parts = [part.strip() for part in _split_string(line1, ":", "\\", 2)]
+        parts = [part.strip() for part in line1.split(":", 2)]
         key = str(parts[0])
         item.data[key] = {}
         multiline_char = [">", "$>"]

         if len(parts) < 2:
-            return None
+            return

         if len(parts) == 2:
             parts = parts[:1] + ["value"] + parts[1:]

         subtypes = self.config.ANNOTATIONS.get(name)["subtypes"]
         if subtypes and parts[1] not in subtypes:
-            return None
+            return

         content = [parts[2]]

         if parts[2] not in multiline_char and parts[2].startswith("$"):
             source = parts[2].replace("$", "").strip()
-            content = self._str_to_json(key, source, rfile, num)
+            content = self._str_to_json(key, source, rfile, num, line)

         item.data[key][parts[1]] = content

@@ -128,8 +131,7 @@ class Annotation:
         multiline = []
         stars_with_annotation = r"(\#\ *[\@][\w]+)"
         current_file_position = self._file_handler.tell()
-        before = ""
-        after = ""
+        newline = ""

         while True:
             next_line = self._file_handler.readline().lstrip()

@@ -152,34 +154,31 @@ class Annotation:
             final = re.findall(r"\#(.*)", next_line)[0].rstrip()
             if final[:1] == " ":
                 final = final[1:]
-            final = before + final
+            final = newline + final

             # match if empty line or commented empty line
             test_line = next_line.replace("#", "").strip()
             if len(test_line) == 0:
-                before = "\n\n"
+                newline = "\n\n"
                 continue
-            before = ""
-
-            if test_line.endswith("\\"):
-                final = final.rstrip("\\").strip()
-                after = "\n"
             else:
-                after = ""
+                newline = ""

-            multiline.append(before + final + after)
+            multiline.append(newline + final)

         if parts[2].startswith("$"):
             source = "".join([x.strip() for x in multiline])
-            multiline = self._str_to_json(key, source, rfile, num)
+            multiline = self._str_to_json(key, source, rfile, num, line)

         item.data[key][parts[1]] = multiline
         return item

-    def _str_to_json(self, key, string, rfile, num):
+    def _str_to_json(self, key, string, rfile, num, line):
         try:
             return {key: json.loads(string)}
         except ValueError:
-            sysexit_with_message(
-                f"ValueError: Failed to parse json in {rfile}:{num!s}", file=rfile
-            )
+            self.log.sysexit_with_message(
+                "Json value error: Can't parse json in {}:{}:\n{}".format(
+                    rfile, str(num), line.strip()
+                )
+            )
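A note on the annotation handling shown above: on both sides, `_get_annotation_data` splits an annotation comment into a key, an optional subtype and a value, and treats a value starting with `$` as inline JSON. A minimal standalone sketch of that idea, using the simplified colon-splitting from the v1.2.0 side; the function name `parse_annotation` and the sample line are illustrative only, not part of the project.

import json
import re

# Sketch: "# @var my_key: $ <json>" -> {"my_key": {"value": <parsed json>}}
def parse_annotation(line, name="var"):
    regex = r"\#\ *\@" + name + r"\ +(.*)"
    match = re.match(regex, line.strip())
    if not match:
        return None

    parts = [part.strip() for part in match.group(1).split(":", 2)]
    if len(parts) < 2:
        return None
    if len(parts) == 2:
        # default subtype when only "key: content" is given
        parts = parts[:1] + ["value"] + parts[1:]

    key, subtype, content = parts
    if content.startswith("$"):
        # "$" switches the content to JSON, mirroring _str_to_json()
        content = json.loads(content.replace("$", "", 1).strip())
    return {key: {subtype: content}}


print(parse_annotation('# @var docker_version: $ "latest"'))
# {'docker_version': {'value': 'latest'}}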
|
@@ -2,36 +2,29 @@
 """Entrypoint and CLI handler."""

 import argparse
-import os
-
-import structlog

 import ansibledoctor.exception
 from ansibledoctor import __version__
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.doc_generator import Generator
 from ansibledoctor.doc_parser import Parser
-from ansibledoctor.utils import sysexit_with_message
+from ansibledoctor.utils import SingleLog


 class AnsibleDoctor:
-    """Create main object."""
-
-    log = structlog.get_logger()
+    """Main doctor object."""

     def __init__(self):
-        try:
-            self.config = SingleConfig()
-            self.config.load(args=self._parse_args())
-            self._execute()
-        except ansibledoctor.exception.DoctorError as e:
-            sysexit_with_message(e)
-        except FileNotFoundError as e:
-            sysexit_with_message("Base directory not found", path=e.filename)
-        except KeyboardInterrupt:
-            sysexit_with_message("Aborted...")
-
-    def _parse_args(self):
+        self.log = SingleLog()
+        self.logger = self.log.logger
+        self.args = self._cli_args()
+        self.config = self._get_config()
+
+        doc_parser = Parser()
+        doc_generator = Generator(doc_parser)
+        doc_generator.render()
+
+    def _cli_args(self):
         """
         Use argparse for parsing CLI arguments.

@@ -42,106 +35,72 @@ class AnsibleDoctor:
             description="Generate documentation from annotated Ansible roles using templates"
         )
         parser.add_argument(
-            "base_dir",
-            nargs="?",
-            default=self.config.config.base_dir,
-            help="base directory (default: current working directory)",
+            "role_dir", nargs="?", help="role directory (default: current working dir)"
         )
         parser.add_argument(
-            "-c",
-            "--config",
-            dest="config_file",
-            help="path to configuration file",
+            "-c", "--config", dest="config_file", help="location of configuration file"
         )
         parser.add_argument(
-            "-o",
-            "--output",
-            dest="renderer__dest",
-            action="store",
-            default=self.config.config.renderer.dest,
-            help="output directory",
-            metavar="OUTPUT_DIR",
-        )
-        parser.add_argument(
-            "-r",
-            "--recursive",
-            dest="recursive",
-            action="store_true",
-            default=self.config.config.recursive,
-            help="run recursively over the base directory subfolders",
+            "-o", "--output", dest="output_dir", action="store", help="output base dir"
         )
         parser.add_argument(
             "-f",
             "--force",
-            dest="renderer.force_overwrite",
+            dest="force_overwrite",
             action="store_true",
-            default=self.config.config.renderer.force_overwrite,
-            help="force overwrite output file",
+            default=None,
+            help="force overwrite output file"
         )
         parser.add_argument(
             "-d",
             "--dry-run",
             dest="dry_run",
             action="store_true",
-            default=self.config.config.dry_run,
-            help="dry run without writing",
+            default=None,
+            help="dry run without writing"
        )
         parser.add_argument(
             "-n",
             "--no-role-detection",
             dest="role_detection",
             action="store_false",
-            default=self.config.config.role.autodetect,
-            help="disable automatic role detection",
+            default=None,
+            help="disable automatic role detection"
         )
         parser.add_argument(
-            "-v",
-            dest="logging.level",
-            action="append_const",
-            const=-1,
-            help="increase log level",
+            "-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
         )
         parser.add_argument(
-            "-q",
-            dest="logging.level",
-            action="append_const",
-            const=1,
-            help="decrease log level",
+            "-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
         )
         parser.add_argument(
-            "--version",
-            action="version",
-            version=f"%(prog)s {__version__}",
+            "--version", action="version", version="%(prog)s {}".format(__version__)
         )

         return parser.parse_args().__dict__

-    def _execute(self):
-        cwd = os.path.abspath(self.config.config.base_dir)
-        walkdirs = [cwd]
-
-        if self.config.config.recursive:
-            walkdirs = [f.path for f in os.scandir(cwd) if f.is_dir()]
-
-        for item in walkdirs:
-            os.chdir(item)
-            self.config.load(root_path=os.getcwd())
-
-            self.log.debug("Switch working directory", path=item)
-            self.log.info("Lookup config file", path=self.config.config_files)
-
-            if self.config.config.role.autodetect:
-                if self.config.is_role():
-                    structlog.contextvars.bind_contextvars(role=self.config.config.role_name)
-                    self.log.info("Ansible role detected")
-                else:
-                    sysexit_with_message("No Ansible role detected")
-            else:
-                self.log.info("Ansible role detection disabled")
-
-            doc_parser = Parser()
-            doc_generator = Generator(doc_parser)
-            doc_generator.render()
+    def _get_config(self):
+        try:
+            config = SingleConfig(args=self.args)
+        except ansibledoctor.exception.ConfigError as e:
+            self.log.sysexit_with_message(e)
+
+        try:
+            self.log.set_level(config.config["logging"]["level"])
+        except ValueError as e:
+            self.log.sysexit_with_message("Can not set log level.\n{}".format(str(e)))
+
+        if config.config["role_detection"]:
+            if config.is_role:
+                self.logger.info("Ansible role detected")
+            else:
+                self.log.sysexit_with_message("No Ansible role detected")
+        else:
+            self.logger.info("Ansible role detection disabled")
+
+        self.logger.info("Using config file {}".format(config.config_file))
+
+        return config


 def main():
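Both sides of the CLI diff use argparse's `append_const` action so that repeated `-v`/`-q` flags shift the log level up or down. A small self-contained sketch of that pattern; the level list mirrors the one used in the config code, while the program name and default level are illustrative assumptions.

import argparse

# Repeated -v / -q flags collect -1 / +1 into a list that is later folded
# into an index of the level list, as the config handling above does.
LEVELS = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]

parser = argparse.ArgumentParser(prog="example")
parser.add_argument("-v", dest="adjust", action="append_const", const=-1,
                    help="increase log level")
parser.add_argument("-q", dest="adjust", action="append_const", const=1,
                    help="decrease log level")

args = parser.parse_args(["-v", "-v"])
level = LEVELS.index("WARNING")
for step in args.adjust or []:
    level = min(len(LEVELS) - 1, max(level + step, 0))

print(LEVELS[level])  # DEBUG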
@@ -1,226 +1,303 @@
 #!/usr/bin/env python3
 """Global settings definition."""

-import logging
 import os
-import re
-from io import StringIO

-import colorama
-import structlog
+import anyconfig
+import environs
+import jsonschema.exceptions
+import ruamel.yaml
 from appdirs import AppDirs
-from dynaconf import Dynaconf, ValidationError, Validator
+from jsonschema._utils import format_as_index

 import ansibledoctor.exception
 from ansibledoctor.utils import Singleton

+config_dir = AppDirs("ansible-doctor").user_config_dir
+default_config_file = os.path.join(config_dir, "config.yml")

-class Config:
-    """Create configuration object."""
+class Config():
+    """
+    Create an object with all necessary settings.
+
+    Settings are loade from multiple locations in defined order (last wins):
+    - default settings defined by `self._get_defaults()`
+    - yaml config file, defaults to OS specific user config dir (https://pypi.org/project/appdirs/)
+    - provides cli parameters
+    """
+
+    SETTINGS = {
+        "config_file": {
+            "default": "",
+            "env": "CONFIG_FILE",
+            "type": environs.Env().str
+        },
+        "role_dir": {
+            "default": "",
+            "env": "ROLE_DIR",
+            "type": environs.Env().str
+        },
+        "role_name": {
+            "default": "",
+            "env": "ROLE_NAME",
+            "type": environs.Env().str
+        },
+        "dry_run": {
+            "default": False,
+            "env": "DRY_RUN",
+            "file": True,
+            "type": environs.Env().bool
+        },
+        "logging.level": {
+            "default": "WARNING",
+            "env": "LOG_LEVEL",
+            "file": True,
+            "type": environs.Env().str
+        },
+        "logging.json": {
+            "default": False,
+            "env": "LOG_JSON",
+            "file": True,
+            "type": environs.Env().bool
+        },
+        "output_dir": {
+            "default": os.getcwd(),
+            "env": "OUTPUT_DIR",
+            "file": True,
+            "type": environs.Env().str
+        },
+        "template_dir": {
+            "default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
+            "env": "TEMPLATE_DIR",
+            "file": True,
+            "type": environs.Env().str
+        },
+        "template": {
+            "default": "readme",
+            "env": "TEMPLATE",
+            "file": True,
+            "type": environs.Env().str
+        },
+        "force_overwrite": {
+            "default": False,
+            "env": "FORCE_OVERWRITE",
+            "file": True,
+            "type": environs.Env().bool
+        },
+        "custom_header": {
+            "default": "",
+            "env": "CUSTOM_HEADER",
+            "file": True,
+            "type": environs.Env().str
+        },
+        "exclude_files": {
+            "default": [],
+            "env": "EXCLUDE_FILES",
+            "file": True,
+            "type": environs.Env().list
+        },
+        "role_detection": {
+            "default": True,
+            "env": "ROLE_DETECTION",
+            "file": True,
+            "type": environs.Env().bool
+        },
+    }

     ANNOTATIONS = {
         "meta": {
             "name": "meta",
             "automatic": True,
             "subtypes": ["value"],
-            "allow_multiple": False,
+            "allow_multiple": False
         },
         "todo": {
             "name": "todo",
             "automatic": True,
             "subtypes": ["value"],
-            "allow_multiple": True,
+            "allow_multiple": True
         },
         "var": {
             "name": "var",
             "automatic": True,
-            "subtypes": ["value", "example", "description", "type", "deprecated"],
-            "allow_multiple": False,
+            "subtypes": ["value", "example", "description"],
+            "allow_multiple": False
         },
         "example": {
             "name": "example",
             "automatic": True,
             "subtypes": [],
-            "allow_multiple": False,
+            "allow_multiple": False
         },
         "tag": {
             "name": "tag",
             "automatic": True,
             "subtypes": ["value", "description"],
-            "allow_multiple": False,
+            "allow_multiple": False
         },
     }

-    def __init__(self):
-        self.config_files = [
-            os.path.join(AppDirs("ansible-doctor").user_config_dir, "config.yml"),
-            ".ansibledoctor",
-            ".ansibledoctor.yml",
-            ".ansibledoctor.yaml",
-        ]
-        self.config_merge = True
-        self.args = {}
-        self.load()
-
-    def load(self, root_path=None, args=None):
-        tmpl_src = os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates")
-        tmpl_provider = ["local", "git"]
-
-        if args:
-            if args.get("config_file"):
-                self.config_merge = False
-                self.config_files = [os.path.abspath(args.get("config_file"))]
-                args.pop("config_file")
-
-            self.args = args
-
-        self.config = Dynaconf(
-            envvar_prefix="ANSIBLE_DOCTOR",
-            merge_enabled=self.config_merge,
-            core_loaders=["YAML"],
-            root_path=root_path,
-            settings_files=self.config_files,
-            fresh_vars=["base_dir", "output_dir"],
-            validators=[
-                Validator(
-                    "base_dir",
-                    default=os.getcwd(),
-                    apply_default_on_none=True,
-                    is_type_of=str,
-                ),
-                Validator(
-                    "dry_run",
-                    default=False,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "recursive",
-                    default=False,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "exclude_files",
-                    default=[],
-                    is_type_of=list,
-                ),
-                Validator(
-                    "exclude_tags",
-                    default=[],
-                    is_type_of=list,
-                ),
-                Validator(
-                    "role.name",
-                    is_type_of=str,
-                ),
-                Validator(
-                    "role.autodetect",
-                    default=True,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "logging.level",
-                    default="WARNING",
-                    is_in=[
-                        "DEBUG",
-                        "INFO",
-                        "WARNING",
-                        "ERROR",
-                        "CRITICAL",
-                        "debug",
-                        "info",
-                        "warning",
-                        "error",
-                        "critical",
-                    ],
-                ),
-                Validator(
-                    "logging.json",
-                    default=False,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "recursive",
-                    default=False,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "template.src",
-                    default=f"local>{tmpl_src}",
-                    is_type_of=str,
-                    condition=lambda x: re.match(r"^(local|git)\s*>\s*", x),
-                    messages={
-                        "condition": f"Template provider must be one of {tmpl_provider}.",
-                    },
-                ),
-                Validator(
-                    "template.name",
-                    default="readme",
-                    is_type_of=str,
-                ),
-                Validator(
-                    "template.options.tabulate_variables",
-                    default=False,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "renderer.autotrim",
-                    default=True,
-                    is_type_of=bool,
-                ),
-                Validator(
-                    "renderer.include_header",
-                    default="",
-                    is_type_of=str,
-                ),
-                Validator(
-                    "renderer.dest",
-                    default=os.path.relpath(os.getcwd()),
-                    is_type_of=str,
-                ),
-                Validator(
-                    "renderer.force_overwrite",
-                    default=False,
-                    is_type_of=bool,
-                ),
-            ],
-        )
-
-        self.validate()
+    def __init__(self, args={}):
+        """
+        Initialize a new settings class.
+
+        :param args: An optional dict of options, arguments and commands from the CLI.
+        :param config_file: An optional path to a yaml config file.
+        :returns: None
+
+        """
+        self._args = args
+        self._schema = None
+        self.config_file = default_config_file
+        self.role_dir = os.getcwd()
+        self.config = None
+        self._set_config()
+        self.is_role = self._set_is_role() or False
+
+    def _get_args(self, args):
+        cleaned = dict(filter(lambda item: item[1] is not None, args.items()))
+
+        normalized = {}
+        for key, value in cleaned.items():
+            normalized = self._add_dict_branch(normalized, key.split("."), value)

         # Override correct log level from argparse
         levels = ["DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"]
-        log_level = levels.index(self.config.logging.level.upper())
-        if self.args.get("logging.level") and isinstance(self.args["logging.level"], list):
-            for lvl in self.args["logging.level"]:
-                log_level = min(len(levels) - 1, max(log_level + lvl, 0))
-
-        self.args["logging__level"] = levels[log_level]
-
-        if root_path:
-            self.args["base_dir"] = root_path
-
-        self.config.update(self.args)
-        self.validate()
-
-        self._init_logger()
-
-    def validate(self):
-        try:
-            self.config.validators.validate_all()
-        except ValidationError as e:
-            raise ansibledoctor.exception.ConfigError("Configuration error", e.message) from e
-
-    def is_role(self):
-        self.config.role_name = self.config.get(
-            "role_name", os.path.basename(self.config.base_dir)
-        )
-        return os.path.isdir(os.path.join(self.config.base_dir, "tasks"))
+        log_level = levels.index(self.SETTINGS["logging.level"]["default"])
+        if normalized.get("logging"):
+            for adjustment in normalized["logging"]["level"]:
+                log_level = min(len(levels) - 1, max(log_level + adjustment, 0))
+            normalized["logging"]["level"] = levels[log_level]
+
+        return normalized
+
+    def _get_defaults(self):
+        normalized = {}
+        for key, item in self.SETTINGS.items():
+            normalized = self._add_dict_branch(normalized, key.split("."), item["default"])
+
+        # compute role_name default
+        normalized["role_name"] = os.path.basename(self.role_dir)
+
+        self.schema = anyconfig.gen_schema(normalized)
+        return normalized
+
+    def _get_envs(self):
+        normalized = {}
+        for key, item in self.SETTINGS.items():
+            if item.get("env"):
+                prefix = "ANSIBLE_DOCTOR_"
+                envname = prefix + item["env"]
+                try:
+                    value = item["type"](envname)
+                    normalized = self._add_dict_branch(normalized, key.split("."), value)
+                except environs.EnvError as e:
+                    if '"{}" not set'.format(envname) in str(e):
+                        pass
+                    else:
+                        raise ansibledoctor.exception.ConfigError(
+                            "Unable to read environment variable", str(e)
+                        )
+
+        return normalized
+
+    def _set_config(self):
+        args = self._get_args(self._args)
+        envs = self._get_envs()
+        defaults = self._get_defaults()
+
+        # preset config file path
+        if envs.get("config_file"):
+            self.config_file = self._normalize_path(envs.get("config_file"))
+        if envs.get("role_dir"):
+            self.role_dir = self._normalize_path(envs.get("role_dir"))
+
+        if args.get("config_file"):
+            self.config_file = self._normalize_path(args.get("config_file"))
+        if args.get("role_dir"):
+            self.role_dir = self._normalize_path(args.get("role_dir"))
+
+        source_files = []
+        source_files.append(self.config_file)
+        source_files.append(os.path.join(os.getcwd(), ".ansibledoctor"))
+        source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yml"))
+        source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yaml"))
+
+        for config in source_files:
+            if config and os.path.exists(config):
+                with open(config, "r", encoding="utf8") as stream:
+                    s = stream.read()
+                    try:
+                        file_dict = ruamel.yaml.safe_load(s)
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
+                        message = "{} {}".format(e.context, e.problem)
+                        raise ansibledoctor.exception.ConfigError(
+                            "Unable to read config file {}".format(config), message
+                        )
+
+                    if self._validate(file_dict):
+                        anyconfig.merge(defaults, file_dict, ac_merge=anyconfig.MS_DICTS)
+                        defaults["logging"]["level"] = defaults["logging"]["level"].upper()
+
+        if self._validate(envs):
+            anyconfig.merge(defaults, envs, ac_merge=anyconfig.MS_DICTS)
+
+        if self._validate(args):
+            anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)
+
+        fix_files = ["output_dir", "template_dir", "custom_header"]
+        for file in fix_files:
+            if defaults[file] and defaults[file] != "":
+                defaults[file] = self._normalize_path(defaults[file])
+
+        if "config_file" in defaults:
+            defaults.pop("config_file")
+        if "role_dir" in defaults:
+            defaults.pop("role_dir")
+
+        defaults["logging"]["level"] = defaults["logging"]["level"].upper()
+
+        self.config = defaults
+
+    def _normalize_path(self, path):
+        if not os.path.isabs(path):
+            base = os.path.join(os.getcwd(), path)
+            return os.path.abspath(os.path.expanduser(os.path.expandvars(base)))
+        else:
+            return path
+
+    def _set_is_role(self):
+        if os.path.isdir(os.path.join(self.role_dir, "tasks")):
+            return True
+
+    def _validate(self, config):
+        try:
+            anyconfig.validate(config, self.schema, ac_schema_safe=False)
+        except jsonschema.exceptions.ValidationError as e:
+            schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format(
+                validator=e.validator,
+                schema=format_as_index(list(e.relative_schema_path)[:-1]),
+                message=e.message
+            )
+            raise ansibledoctor.exception.ConfigError("Configuration error", schema_error)
+
+        return True
+
+    def _add_dict_branch(self, tree, vector, value):
+        key = vector[0]
+        tree[key] = value \
+            if len(vector) == 1 \
+            else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
+        return tree

     def get_annotations_definition(self, automatic=True):
         annotations = {}
         if automatic:
             for k, item in self.ANNOTATIONS.items():
-                if item.get("automatic"):
+                if "automatic" in item.keys() and item["automatic"]:
                     annotations[k] = item
         return annotations

@@ -228,84 +305,19 @@ class Config:
         annotations = []
         if automatic:
             for k, item in self.ANNOTATIONS.items():
-                if item.get("automatic"):
+                if "automatic" in item.keys() and item["automatic"]:
                     annotations.append(k)
         return annotations

-    def _init_logger(self):
-        styles = structlog.dev.ConsoleRenderer.get_default_level_styles()
-        styles["debug"] = colorama.Fore.BLUE
-
-        processors = [
-            structlog.contextvars.merge_contextvars,
-            structlog.processors.add_log_level,
-            structlog.processors.StackInfoRenderer(),
-            structlog.dev.set_exc_info,
-            structlog.processors.TimeStamper(fmt="%Y-%m-%d %H:%M:%S", utc=False),
-        ]
-
-        if self.config.logging.json:
-            processors.append(ErrorStringifier())
-            processors.append(structlog.processors.JSONRenderer())
-        else:
-            processors.append(MultilineConsoleRenderer(level_styles=styles))
-
-        try:
-            structlog.configure(
-                processors=processors,
-                wrapper_class=structlog.make_filtering_bound_logger(
-                    logging.getLevelName(self.config.get("logging.level")),
-                ),
-            )
-            structlog.contextvars.unbind_contextvars()
-        except KeyError as e:
-            raise ansibledoctor.exception.ConfigError(f"Can not set log level: {e!s}") from e
-
-
-class ErrorStringifier:
-    """A processor that converts exceptions to a string representation."""
-
-    def __call__(self, _, __, event_dict):
-        if "error" not in event_dict:
-            return event_dict
-
-        err = event_dict.get("error")
-
-        if isinstance(err, Exception):
-            event_dict["error"] = f"{err.__class__.__name__}: {err}"
-
-        return event_dict
-
-
-class MultilineConsoleRenderer(structlog.dev.ConsoleRenderer):
-    """A processor for printing multiline strings."""
-
-    def __init__(self, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-
-    def __call__(self, _, __, event_dict):
-        err = None
-
-        if "error" in event_dict:
-            err = event_dict.pop("error")
-
-        event_dict = super().__call__(_, __, event_dict)
-
-        if not err:
-            return event_dict
-
-        sio = StringIO()
-        sio.write(event_dict)
-
-        if isinstance(err, Exception):
-            sio.write(
-                f"\n{colorama.Fore.RED}{err.__class__.__name__}:"
-                f"{colorama.Style.RESET_ALL} {str(err).strip()}"
-            )
-        else:
-            sio.write(f"\n{err.strip()}")
-
-        return sio.getvalue()
+    def get_template(self):
+        """
+        Get the base dir for the template to use.
+
+        :return: str abs path
+        """
+        template_dir = self.config.get("template_dir")
+        template = self.config.get("template")
+        return os.path.realpath(os.path.join(template_dir, template))


 class SingleConfig(Config, metaclass=Singleton):
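The v1.2.0 config code above turns dotted option names such as `logging.level` into nested dictionaries via `_add_dict_branch`. A standalone sketch of the same recursion, outside the class; the sample settings are illustrative only.

# Sketch of the dotted-key expansion used by _add_dict_branch:
# "logging.level" -> {"logging": {"level": ...}}
def add_dict_branch(tree, vector, value):
    key = vector[0]
    tree[key] = (
        value
        if len(vector) == 1
        else add_dict_branch(tree.get(key, {}), vector[1:], value)
    )
    return tree


settings = {}
for dotted, value in {"logging.level": "WARNING", "logging.json": False, "dry_run": True}.items():
    add_dict_branch(settings, dotted.split("."), value)

print(settings)
# {'logging': {'level': 'WARNING', 'json': False}, 'dry_run': True}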
@@ -1,127 +1,146 @@
 #!/usr/bin/env python3
 """Prepare output and write compiled jinja2 templates."""

-import json
+import glob
+import ntpath
 import os
 import re
 from functools import reduce

 import jinja2.exceptions
 import ruamel.yaml
-import structlog
-from jinja2 import Environment, FileSystemLoader
-from jinja2.filters import pass_eval_context
+from jinja2 import Environment
+from jinja2 import FileSystemLoader
+from jinja2.filters import evalcontextfilter

+import ansibledoctor.exception
 from ansibledoctor.config import SingleConfig
-from ansibledoctor.template import Template
-from ansibledoctor.utils import FileUtils, sysexit_with_message
+from ansibledoctor.utils import FileUtils
+from ansibledoctor.utils import SingleLog


 class Generator:
     """Generate documentation from jinja2 templates."""

     def __init__(self, doc_parser):
-        self.log = structlog.get_logger()
+        self.template_files = []
+        self.extension = "j2"
+        self._parser = None
         self.config = SingleConfig()
-        self.template = Template(
-            self.config.config.get("template.name"),
-            self.config.config.get("template.src"),
-        )
+        self.log = SingleLog()
+        self.logger = self.log.logger
         self._parser = doc_parser
+        self._scan_template()
+
+    def _scan_template(self):
+        """
+        Search for Jinja2 (.j2) files to apply to the destination.
+
+        :return: None
+        """
+        template_dir = self.config.get_template()
+        if os.path.isdir(template_dir):
+            self.logger.info("Using template dir: {}".format(template_dir))
+        else:
+            self.log.sysexit_with_message("Can not open template dir {}".format(template_dir))
+
+        for file in glob.iglob(template_dir + "/**/*." + self.extension, recursive=True):
+            relative_file = file[len(template_dir) + 1:]
+            if ntpath.basename(file)[:1] != "_":
+                self.logger.debug("Found template file: " + relative_file)
+                self.template_files.append(relative_file)
+            else:
+                self.logger.debug("Ignoring template file: " + relative_file)

     def _create_dir(self, directory):
         if not self.config.config["dry_run"] and not os.path.isdir(directory):
             try:
                 os.makedirs(directory, exist_ok=True)
-                self.log.info(f"Creating dir: {directory}")
+                self.logger.info("Creating dir: " + directory)
             except FileExistsError as e:
-                sysexit_with_message(e)
+                self.log.sysexit_with_message(str(e))

     def _write_doc(self):
         files_to_overwite = []

-        for tf in self.template.files:
-            doc_file = os.path.join(
-                self.config.config.get("renderer.dest"), os.path.splitext(tf)[0]
-            )
+        for file in self.template_files:
+            doc_file = os.path.join(
+                self.config.config.get("output_dir"),
+                os.path.splitext(file)[0]
+            )
             if os.path.isfile(doc_file):
                 files_to_overwite.append(doc_file)

-        header_file = self.config.config.get("renderer.include_header")
+        header_file = self.config.config.get("custom_header")
         role_data = self._parser.get_data()
         header_content = ""
         if bool(header_file):
             role_data["internal"]["append"] = True
             try:
-                with open(header_file) as a:
+                with open(header_file, "r") as a:
                     header_content = a.read()
             except FileNotFoundError as e:
-                sysexit_with_message("Can not open custom header file", path=header_file, error=e)
+                self.log.sysexit_with_message("Can not open custom header file\n{}".format(str(e)))

-        if (
-            len(files_to_overwite) > 0
-            and self.config.config.get("renderer.force_overwrite") is False
-            and not self.config.config["dry_run"]
-        ):
-            files_to_overwite_string = "\n".join(files_to_overwite)
-            prompt = f"These files will be overwritten:\n{files_to_overwite_string}".replace(
-                "\n", "\n... "
-            )
+        if len(files_to_overwite) > 0 and self.config.config.get("force_overwrite") is False:
+            if not self.config.config["dry_run"]:
+                self.logger.warn("This files will be overwritten:")
+                print(*files_to_overwite, sep="\n")

             try:
-                if not FileUtils.query_yes_no(f"{prompt}\nDo you want to continue?"):
-                    sysexit_with_message("Aborted...")
-            except KeyboardInterrupt:
-                sysexit_with_message("Aborted...")
+                if not FileUtils.query_yes_no("Do you want to continue?"):
+                    self.log.sysexit_with_message("Aborted...")
+            except ansibledoctor.exception.InputError as e:
+                self.logger.debug(str(e))
+                self.log.sysexit_with_message("Aborted...")

-        for tf in self.template.files:
-            doc_file = os.path.join(
-                self.config.config.get("renderer.dest"), os.path.splitext(tf)[0]
-            )
-            template = os.path.join(self.template.path, tf)
-
-            self.log.debug("Writing renderer output", path=doc_file, src=os.path.dirname(template))
+        for file in self.template_files:
+            doc_file = os.path.join(
+                self.config.config.get("output_dir"),
+                os.path.splitext(file)[0]
+            )
+            source_file = self.config.get_template() + "/" + file
+
+            self.logger.debug("Writing doc output to: " + doc_file + " from: " + source_file)

             # make sure the directory exists
             self._create_dir(os.path.dirname(doc_file))

-            if os.path.exists(template) and os.path.isfile(template):
-                with open(template) as template:
+            if os.path.exists(source_file) and os.path.isfile(source_file):
+                with open(source_file, "r") as template:
                     data = template.read()
                     if data is not None:
                         try:
                             jenv = Environment(  # nosec
-                                loader=FileSystemLoader(self.template.path),
+                                loader=FileSystemLoader(self.config.get_template()),
                                 lstrip_blocks=True,
-                                trim_blocks=True,
-                                autoescape=jinja2.select_autoescape(),
+                                trim_blocks=True
                             )
                             jenv.filters["to_nice_yaml"] = self._to_nice_yaml
-                            jenv.filters["to_code"] = self._to_code
                             jenv.filters["deep_get"] = self._deep_get
-                            jenv.filters["safe_join"] = self._safe_join
-                            # keep the old name of the function to not break custom templates.
-                            jenv.filters["save_join"] = self._safe_join
-                            template_options = self.config.config.get("template.options")
-                            data = jenv.from_string(data).render(
-                                role_data, role=role_data, options=template_options
-                            )
+                            jenv.filters["save_join"] = self._save_join
+                            data = jenv.from_string(data).render(role_data, role=role_data)
                             if not self.config.config["dry_run"]:
                                 with open(doc_file, "wb") as outfile:
                                     outfile.write(header_content.encode("utf-8"))
                                     outfile.write(data.encode("utf-8"))
+                                    self.logger.info("Writing to: " + doc_file)
+                            else:
+                                self.logger.info("Writing to: " + doc_file)
                         except (
-                            jinja2.exceptions.UndefinedError,
-                            jinja2.exceptions.TemplateSyntaxError,
-                            jinja2.exceptions.TemplateRuntimeError,
+                            jinja2.exceptions.UndefinedError, jinja2.exceptions.TemplateSyntaxError
                         ) as e:
-                            sysexit_with_message(
-                                "Jinja2 template error while loading file", path=tf, error=e
-                            )
+                            self.log.sysexit_with_message(
+                                "Jinja2 templating error while loading file: '{}'\n{}".format(
+                                    file, str(e)
+                                )
+                            )
                         except UnicodeEncodeError as e:
-                            sysexit_with_message("Failed to print special characters", error=e)
+                            self.log.sysexit_with_message(
+                                "Unable to print special characters\n{}".format(str(e))
+                            )

-    def _to_nice_yaml(self, a, indent=4, **kw):
+    def _to_nice_yaml(self, a, indent=4, *args, **kw):
         """Make verbose, human readable yaml."""
         yaml = ruamel.yaml.YAML()
         yaml.indent(mapping=indent, sequence=(indent * 2), offset=indent)

@@ -129,52 +148,22 @@ class Generator:
         yaml.dump(a, stream, **kw)
         return stream.getvalue().rstrip()

-    def _to_code(self, a, to_multiline=False, tab_var=False, preserve_ms=False, lang="plain"):
-        """Wrap a string in backticks."""
-        if a is None or a == "":
-            return ""
-
-        if (isinstance(a, list) and len(a) < 1) or (isinstance(a, dict) and not a):
-            return ""
-
-        if isinstance(a, list) and len(a) > 1 and preserve_ms:
-            return a
-
-        if isinstance(a, list) and len(a) == 1:
-            return f"`{self._tab_var(a[0], tab_var)}`"
-
-        if (isinstance(a, list)) and to_multiline:
-            return "```" + lang + "\n" + "\n".join(a) + "\n```"
-
-        return f"`{self._tab_var(a, tab_var)}`"
-
-    def _tab_var(self, a, tab_var):
-        """Wrap a string in backticks."""
-        if not tab_var:
-            return a
-
-        return json.dumps(a)
-
-    def _deep_get(self, _, dictionary, keys):
+    def _deep_get(self, _, dictionary, keys, *args, **kw):
         default = None
         return reduce(
-            lambda d, key: d.get(key, default) if isinstance(d, dict) else default,
-            keys.split("."),
-            dictionary,
+            lambda d, key: d.get(key, default)
+            if isinstance(d, dict) else default, keys.split("."), dictionary
         )

-    @pass_eval_context
-    def _safe_join(self, eval_ctx, value, d=""):
+    @evalcontextfilter
+    def _save_join(self, eval_ctx, value, d=u"", attribute=None):
         if isinstance(value, str):
             value = [value]

-        normalized = jinja2.filters.do_join(eval_ctx, value, d, attribute=None)
-
-        if self.config.config.renderer.autotrim:
-            for s in [r" +(\n|\t| )", r"(\n|\t) +"]:
-                normalized = re.sub(s, "\\1", normalized)
-
-        return jinja2.filters.do_mark_safe(normalized)
+        joined = jinja2.filters.do_join(eval_ctx, value, d, attribute=None)
+        nornalized = re.sub(r" +(\n|\t| )", "\\1", joined)
+        return nornalized

     def render(self):
+        self.logger.info("Using output dir: " + self.config.config.get("output_dir"))
         self._write_doc()
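Both versions of the generator register custom Jinja2 filters (`to_nice_yaml`, `deep_get`, the join helper) on an `Environment` before rendering. A minimal, hedged example of registering and using such a filter; the filter body below is a simplified sketch, not the project's exact implementation.

from functools import reduce

from jinja2 import Environment

# Simplified deep_get filter: walk a nested dict along a dotted key path.
def deep_get(dictionary, keys, default=None):
    return reduce(
        lambda d, key: d.get(key, default) if isinstance(d, dict) else default,
        keys.split("."),
        dictionary,
    )


env = Environment(autoescape=True)
env.filters["deep_get"] = deep_get

template = env.from_string("{{ meta | deep_get('galaxy_info.author') }}")
print(template.render(meta={"galaxy_info": {"author": "jane"}}))  # jane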
@@ -5,15 +5,16 @@ import fnmatch
 from collections import defaultdict

 import anyconfig
-import structlog
+import ruamel.yaml
+from nested_lookup import nested_lookup

 from ansibledoctor.annotation import Annotation
 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
-from ansibledoctor.exception import YAMLError
 from ansibledoctor.file_registry import Registry
-from ansibledoctor.utils import flatten, sysexit_with_message
-from ansibledoctor.utils.yamlhelper import parse_yaml, parse_yaml_ansible
+from ansibledoctor.utils import SingleLog
+from ansibledoctor.utils import UnsafeTag
+from ansibledoctor.utils import flatten


 class Parser:

@@ -23,73 +24,107 @@ class Parser:
         self._annotation_objs = {}
         self._data = defaultdict(dict)
         self.config = SingleConfig()
-        self.log = structlog.get_logger()
+        self.log = SingleLog()
+        self.logger = SingleLog().logger
         self._files_registry = Registry()
         self._parse_meta_file()
         self._parse_var_files()
         self._parse_task_tags()
         self._populate_doc_data()

+    def _yaml_remove_comments(self, d):
+        if isinstance(d, dict):
+            for k, v in d.items():
+                self._yaml_remove_comments(k)
+                self._yaml_remove_comments(v)
+        elif isinstance(d, list):
+            for elem in d:
+                self._yaml_remove_comments(elem)
+        try:
+            attr = "comment" if isinstance(
+                d, ruamel.yaml.scalarstring.ScalarString
+            ) else ruamel.yaml.comments.Comment.attrib
+            delattr(d, attr)
+        except AttributeError:
+            pass
+
     def _parse_var_files(self):
         for rfile in self._files_registry.get_files():
             if any(fnmatch.fnmatch(rfile, "*/defaults/*." + ext) for ext in YAML_EXTENSIONS):
-                with open(rfile, encoding="utf8") as yamlfile:
-                    try:
-                        raw = parse_yaml(yamlfile)
-                    except YAMLError as e:
-                        sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
-
-                    data = defaultdict(dict, raw or {})
-
-                    for key, value in data.items():
-                        self._data["var"][key] = {"value": {key: value}}
+                with open(rfile, "r", encoding="utf8") as yaml_file:
+                    try:
+                        ruamel.yaml.add_constructor(
+                            UnsafeTag.yaml_tag,
+                            UnsafeTag.yaml_constructor,
+                            constructor=ruamel.yaml.SafeConstructor
+                        )
+
+                        raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
+                        self._yaml_remove_comments(raw)
+
+                        data = defaultdict(dict, raw or {})
+                        for key, value in data.items():
+                            self._data["var"][key] = {"value": {key: value}}
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError,
+                        ruamel.yaml.constructor.ConstructorError
+                    ) as e:
+                        message = "{} {}".format(e.context, e.problem)
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

     def _parse_meta_file(self):
-        self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
-
         for rfile in self._files_registry.get_files():
             if any("meta/main." + ext in rfile for ext in YAML_EXTENSIONS):
-                with open(rfile, encoding="utf8") as yamlfile:
-                    try:
-                        raw = parse_yaml(yamlfile)
-                    except YAMLError as e:
-                        sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
-
-                    data = defaultdict(dict, raw)
-                    if data.get("galaxy_info"):
-                        for key, value in data.get("galaxy_info").items():
-                            self._data["meta"][key] = {"value": value}
-
-                    if data.get("dependencies") is not None:
-                        self._data["meta"]["dependencies"] = {"value": data.get("dependencies")}
+                with open(rfile, "r", encoding="utf8") as yaml_file:
+                    try:
+                        raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
+                        self._yaml_remove_comments(raw)
+
+                        data = defaultdict(dict, raw)
+                        if data.get("galaxy_info"):
+                            for key, value in data.get("galaxy_info").items():
+                                self._data["meta"][key] = {"value": value}
+
+                        if data.get("dependencies") is not None:
+                            self._data["meta"]["dependencies"] = {
+                                "value": data.get("dependencies")
+                            }
+
+                        self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
+                        message = "{} {}".format(e.context, e.problem)
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

     def _parse_task_tags(self):
         for rfile in self._files_registry.get_files():
             if any(fnmatch.fnmatch(rfile, "*/tasks/*." + ext) for ext in YAML_EXTENSIONS):
-                with open(rfile, encoding="utf8") as yamlfile:
-                    try:
-                        raw = parse_yaml_ansible(yamlfile)
-                    except YAMLError as e:
-                        sysexit_with_message("Failed to read yaml file", path=rfile, error=e)
-
-                    tags = []
-                    for task in raw:
-                        task_tags = task.get("tags", [])
-                        if isinstance(task_tags, str):
-                            task_tags = [task_tags]
-
-                        for tag in task_tags:
-                            if tag not in self.config.config["exclude_tags"]:
-                                tags.append(tag)
-
-                    for tag in flatten(tags):
-                        self._data["tag"][tag] = {"value": tag}
+                with open(rfile, "r", encoding="utf8") as yaml_file:
+                    try:
+                        raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
+                        self._yaml_remove_comments(raw)
+
+                        tags = list(set(flatten(nested_lookup("tags", raw))))
+                        for tag in tags:
+                            self._data["tag"][tag] = {"value": tag}
+                    except (
+                        ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
+                    ) as e:
+                        message = "{} {}".format(e.context, e.problem)
+                        self.log.sysexit_with_message(
+                            "Unable to read yaml file {}\n{}".format(rfile, message)
+                        )

     def _populate_doc_data(self):
         """Generate the documentation data object."""
         tags = defaultdict(dict)
         for annotation in self.config.get_annotations_names(automatic=True):
-            self.log.info(f"Lookup annotation @{annotation}")
+            self.logger.info("Finding annotations for: @" + annotation)
             self._annotation_objs[annotation] = Annotation(
                 name=annotation, files_registry=self._files_registry
             )

@@ -98,7 +133,7 @@ class Parser:
         try:
             anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
         except ValueError as e:
-            sysexit_with_message("Failed to merge annotation values", error=e)
+            self.log.sysexit_with_message("Unable to merge annotation values:\n{}".format(e))

     def get_data(self):
         return self._data
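The task-tag parsing differs between the two sides: the main version walks each parsed task dict and honors `exclude_tags`, while v1.2.0 uses `nested_lookup` plus `flatten`. A small sketch of the main-style loop over already-parsed task data; the sample task list and exclusion list are made up for illustration.

# Sketch of collecting tags from parsed task data: string tags are
# normalized to lists and excluded tags are skipped.
tasks = [
    {"name": "install packages", "tags": ["install", "packages"]},
    {"name": "configure service", "tags": "configure"},
    {"name": "debug output", "tags": ["never", "debug"]},
]
exclude_tags = ["never"]

tags = []
for task in tasks:
    task_tags = task.get("tags", [])
    if isinstance(task_tags, str):
        task_tags = [task_tags]
    for tag in task_tags:
        if tag not in exclude_tags and tag not in tags:
            tags.append(tag)

print(tags)  # ['install', 'packages', 'configure', 'debug']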
@@ -1,26 +1,22 @@
 #!/usr/bin/env python3
-"""Doctor exception module."""
+"""Custom exceptions."""


 class DoctorError(Exception):
-    """Define generic exception."""
+    """Generic exception class for ansible-doctor."""

     def __init__(self, msg, original_exception=""):
-        super().__init__(f"{msg}\n{original_exception}")
+        super(DoctorError, self).__init__("{msg}\n{org}".format(msg=msg, org=original_exception))
         self.original_exception = original_exception


-class YAMLError(DoctorError):
-    """Errors while reading a yaml file."""
-
-    pass
-
-
 class ConfigError(DoctorError):
     """Errors related to config file handling."""

     pass


-class TemplateError(DoctorError):
-    """Errors related to template file handling."""
+class InputError(DoctorError):
+    """Errors related to config file handling."""
+
+    pass
@@ -5,10 +5,10 @@ import glob
 import os

 import pathspec
-import structlog

 from ansibledoctor.config import SingleConfig
 from ansibledoctor.contstants import YAML_EXTENSIONS
+from ansibledoctor.utils import SingleLog


 class Registry:

@@ -21,7 +21,7 @@ class Registry:
     def __init__(self):
         self._doc = []
         self.config = SingleConfig()
-        self.log = structlog.get_logger()
+        self.log = SingleLog().logger
         self._scan_for_yamls()

     def get_files(self):

@@ -35,17 +35,24 @@ class Registry:
         :return: None
         """
         extensions = YAML_EXTENSIONS
-        base_dir = self.config.config.base_dir
+        role_dir = self.config.role_dir
+        role_name = os.path.basename(role_dir)
         excludes = self.config.config.get("exclude_files")
         excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

-        self.log.debug("Lookup role files", path=base_dir)
+        self.log.debug("Scan for files: " + role_dir)

         for extension in extensions:
-            pattern = os.path.join(base_dir, "**/*." + extension)
+            pattern = os.path.join(role_dir, "**/*." + extension)
             for filename in glob.iglob(pattern, recursive=True):
                 if not excludespec.match_file(filename):
-                    self.log.debug("Found role file", path=os.path.relpath(filename, base_dir))
+                    self.log.debug(
+                        "Adding file to '{}': {}".format(
+                            role_name, os.path.relpath(filename, role_dir)
+                        )
+                    )
                     self._doc.append(filename)
                 else:
-                    self.log.debug("Skippped role file", path=os.path.relpath(filename, base_dir))
+                    self.log.debug(
+                        "Excluding file: {}".format(os.path.relpath(filename, role_dir))
+                    )
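The file registry in both versions filters discovered YAML files through a `pathspec` ruleset built from the `exclude_files` gitwildmatch patterns. A short, self-contained illustration of that check; the patterns and file names below are made up.

import pathspec

# exclude_files entries are treated as .gitignore-style patterns,
# exactly like the PathSpec usage in the registry code above.
excludes = ["molecule/", "*.vault.yml"]
excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)

candidates = [
    "defaults/main.yml",
    "molecule/default/converge.yml",
    "vars/secrets.vault.yml",
]

for filename in candidates:
    if excludespec.match_file(filename):
        print("excluded:", filename)
    else:
        print("included:", filename)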
@@ -1,113 +0,0 @@
-"""Module for handling templates."""
-
-import atexit
-import glob
-import ntpath
-import os
-import shutil
-import tempfile
-
-import structlog
-from git import GitCommandError, Repo
-
-import ansibledoctor.exception
-from ansibledoctor.utils import sysexit_with_message
-
-
-class Template:
-    """
-    Represents a template that can be used to generate content.
-
-    Templates can be sourced from a local file or a Git repository. The `Template` class handles
-    the initialization and setup of a template, including cloning a Git repository if necessary.
-
-    Args:
-    ----
-        name (str): The name of the template.
-        src (str): The source of the template, in the format `<provider>><path>`.
-            Supported providers are `local` and `git`.
-
-    Raises:
-    ------
-        ansibledoctor.exception.TemplateError
-
-    """
-
-    def __init__(self, name, src):
-        self.log = structlog.get_logger()
-        self.name = name
-        self.src = src
-
-        try:
-            provider, path = self.src.split(">", 1)
-        except ValueError as e:
-            raise ansibledoctor.exception.TemplateError(
-                "Error reading template src", str(e)
-            ) from e
-
-        self.provider = provider.strip().lower()
-        self.path = path.strip()
-
-        if self.provider == "local":
-            self.path = os.path.realpath(os.path.join(self.path, self.name))
-        elif self.provider == "git":
-            repo_url, branch_or_tag = (
-                self.path.split("#", 1) if "#" in self.path else (self.path, None)
-            )
-            temp_dir = self._clone_repo(repo_url, branch_or_tag)
-            self.path = os.path.join(temp_dir, self.name)
-        else:
-            raise ansibledoctor.exception.TemplateError(
-                f"Unsupported template provider: {provider}"
-            )
-
-        self.files = self._scan_files()
-
-    def _clone_repo(self, repo_url, branch_or_tag=None):
-        temp_dir = tempfile.mkdtemp(prefix="ansibledoctor-")
-        atexit.register(self._cleanup_temp_dir, temp_dir)
-
-        try:
-            self.log.debug("Cloning template repo", src=repo_url)
-            repo = Repo.clone_from(repo_url, temp_dir)
-            if branch_or_tag:
-                self.log.debug(f"Checking out branch or tag: {branch_or_tag}")
-                try:
-                    repo.git.checkout(branch_or_tag)
-                except GitCommandError as e:
-                    raise ansibledoctor.exception.TemplateError(
-                        f"Error checking out branch or tag: {branch_or_tag}: {e}"
-                    ) from e
-
-            return temp_dir
-        except GitCommandError as e:
-            msg = e.stderr.strip("'").strip()
-            msg = msg.removeprefix("stderr: ")
-
-            raise ansibledoctor.exception.TemplateError(
-                f"Error cloning Git repository: {msg}"
-            ) from e
-
-    def _scan_files(self):
-        """Search for Jinja2 (.j2) files to apply to the destination."""
-        template_files = []
-
-        if os.path.isdir(self.path):
-            self.log.info("Lookup template files", src=self.src)
-        else:
-            sysexit_with_message("Can not open template directory", path=self.path)
-
-        for file in glob.iglob(self.path + "/**/*.j2", recursive=True):
-            relative_file = file[len(self.path) + 1 :]
-            if ntpath.basename(file)[:1] != "_":
-                self.log.debug("Found template file", path=relative_file)
-                template_files.append(relative_file)
-            else:
-                self.log.debug("Skipped template file", path=relative_file)
-
-        return template_files
-
-    @staticmethod
-    def _cleanup_temp_dir(temp_dir):
-        if temp_dir and os.path.exists(temp_dir):
-            shutil.rmtree(temp_dir)
|
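The removed module above builds a template from a `src` string of the form `<provider>><path>`, optionally carrying a `#branch-or-tag` suffix for the `git` provider. Here is a small sketch of that parsing step on its own; `parse_template_src` is an illustrative helper name, not part of the package.

```python
def parse_template_src(src):
    """Split a template src string into provider, path and optional Git ref.

    Mirrors the `<provider>><path>[#branch-or-tag]` format described above;
    this helper is illustrative and not shipped by ansible-doctor.
    """
    provider, path = src.split(">", 1)
    provider = provider.strip().lower()
    path = path.strip()

    ref = None
    if provider == "git" and "#" in path:
        path, ref = path.split("#", 1)

    return provider, path, ref


print(parse_template_src("local>/tmp/custom_templates/"))
print(parse_template_src("git>https://github.com/thegeeklab/ansible-doctor#main"))
```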
@ -1,7 +0,0 @@
## Requirements

{% if meta | deep_get(meta, "min_ansible_version.value") %}
- Minimum Ansible version: `{{ meta.min_ansible_version.value }}`
{% else %}
None.
{% endif %}
@ -4,8 +4,8 @@
 {% for key, item in tag | dictsort %}

 {{ key }}
-{% if item.description is defined and item.description | safe_join(" ") | striptags %}
-: {{ item.description | safe_join(" ") | striptags }}
+{% if item.description is defined and item.description | save_join(" ") | striptags %}
+: {{ item.description | save_join(" ") | striptags }}
 {% else %}
 :
 {% endif %}
@ -1,13 +1,10 @@
-- [Requirements](#requirements)
 {% set var = role.var | default({}) %}
 {% if var %}
 - [Default Variables](#default-variables)
-{% if not options.tabulate_vars %}
 {% for key, item in var | dictsort %}
   - [{{ key }}](#{{ key }})
 {% endfor %}
 {% endif %}
-{% endif %}
 {% if tag %}
 - [Discovered Tags](#discovered-tags)
 {% endif %}
@ -4,15 +4,15 @@

 {% for key, item in todo | dictsort %}
 {% for line in item %}
-{% if line.value is defined and line.value | safe_join(" ") | striptags and key == "default" %}
-- {{ line.value | safe_join(" ") | striptags }}
+{% if line.value is defined and line.value | save_join(" ") | striptags and key == "default" %}
+- {{ line.value | save_join(" ") | striptags }}
 {% endif %}
 {% endfor %}
 {% endfor %}
 {% for key, item in todo | dictsort %}
 {% for line in item %}
-{% if line.value is defined and line.value | safe_join(" ") | striptags and key != "default" %}
-- ({{ key }}): {{ line.value | safe_join(" ") | striptags }}
+{% if line.value is defined and line.value | save_join(" ") | striptags and key != "default" %}
+- ({{ key }}): {{ line.value | save_join(" ") | striptags }}
 {% endif %}
 {% endfor %}
 {% endfor %}
@ -1,29 +1,13 @@
 {% set var = role.var | default({}) %}
 {% if var %}

 ## Default Variables
 {% for key, item in var | dictsort %}

 ### {{ key }}
 {% if item.description is defined and item.description %}
-{% set description = [item.description] if item.description is string else item.description %}
-
-{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
+{{ item.description | save_join(" ") }}
-{% endif %}
-{% if item.deprecated is defined or item.type is defined %}
-
-{% if item.deprecated is defined %}
-{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
-{% set deprecated_string = deprecated | map("replace", "\n\n", "\n") | safe_join("\n") %}
-{% if deprecated_string %}
-**_Deprecated:_** {{ deprecated_string }}<br />
-{% else %}
-**_Deprecated_**<br />
-{% endif %}
-{% endif %}
-{% if item.type is defined and item.type %}
-{% set type = [item.type] if item.type is string else item.type %}
-**_Type:_** {{ type | map("replace", "\n\n", "\n") | safe_join("\n") }}<br />
-{% endif %}
 {% endif %}
 {% if item.value is defined and item.value %}

@ -1,49 +0,0 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables

{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key | to_code -}}
|{{ (item.value | default({}))[key] | default | to_code -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | to_code(skip_list_len=1) | safe_join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | to_code(skip_list_len=1) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}
@ -1,35 +1,22 @@
 {% if not append | deep_get(role, "internal.append") %}
 {% set meta = role.meta | default({}) %}
 ---
-title: {{ meta.name.value | safe_join(" ") }}
+title: {{ meta.name.value | save_join(" ") }}
 type: docs
-{% if summary | deep_get(meta, "summary.value") %}
-summary: {{ meta.summary.value | safe_join(" ") }}
-{% endif %}
 ---
 {% endif %}
 {% if description | deep_get(meta, "description.value") %}
-{% set description = [meta.description.value] if meta.description.value is string else meta.description.value %}
-
-{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
+{{ meta.description.value | save_join(" ") }}
 {% endif %}

-<!--more-->
-
 {# TOC #}
-{% include '_toc.j2' +%}
+{% include '_toc.j2' %}

-{# Requirements #}
-{% include '_requirements.j2' %}
-
 {# Vars #}
-{% if options.tabulate_vars %}
-{% include '_vars_tabulated.j2' %}
-{% else %}
 {% include '_vars.j2' %}
-{% endif %}

-{# Tag #}
+{# Todo #}
 {% include '_tag.j2' %}

 {# Todo #}
@ -1,27 +1,19 @@
 {% if not append | deep_get(role, "internal.append") %}
 {% set meta = role.meta | default({}) %}
-# {{ meta.name.value | safe_join(" ") }}
+# {{ meta.name.value | save_join(" ") }}
 {% endif %}
 {% if description | deep_get(meta, "description.value") %}
-{% set description = [meta.description.value] if meta.description.value is string else meta.description.value %}
-
-{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
+{{ meta.description.value | save_join(" ") }}
 {% endif %}

 {# TOC #}
-{% include '_toc.j2' +%}
+{% include '_toc.j2' %}

-{# Requirements #}
-{% include '_requirements.j2' %}
-
 {# Vars #}
-{% if options.tabulate_vars %}
-{% include '_vars_tabulated.j2' %}
-{% else %}
 {% include '_vars.j2' %}
-{% endif %}

-{# Tag #}
+{# Todo #}
 {% include '_tag.j2' %}

 {# Todo #}
@ -9,9 +9,7 @@
 {% set deps = meta.dependencies.value %}
 {% endif %}
 {% for item in deps %}
-{% if item is string or item.role %}
-- {{ item if item is string else item.role }}
-{% endif %}
+- {{ item }}
 {% endfor %}
 {% else %}
 None.
@ -20,12 +18,12 @@ None.

 ## License

-{{ meta.license.value | safe_join(" ") }}
+{{ meta.license.value }}
 {% endif %}
 {% if author | deep_get(meta, "author.value") %}

 ## Author

-{{ meta.author.value | safe_join(" ") }}
+{{ meta.author.value | save_join(" ") }}
 {% endif %}
 {% endif %}
@ -1,7 +0,0 @@
## Requirements

{% if meta | deep_get(meta, "min_ansible_version.value") %}
- Minimum Ansible version: `{{ meta.min_ansible_version.value }}`
{% else %}
None.
{% endif %}
@ -2,11 +2,11 @@
 {% if tag %}
 ## Discovered Tags
 {% for key, item in tag | dictsort %}
-{% set is_desc = item.description is defined and item.description | safe_join(" ") | striptags %}
+{% set is_desc = item.description is defined and item.description | save_join(" ") | striptags %}

 **_{{ key }}_**{{ "\\" if is_desc else "" }}
 {% if is_desc %}
- {{ item.description | safe_join(" ") | striptags }}
+ {{ item.description | save_join(" ") | striptags }}
 {% endif %}
 {% endfor %}
 {% endif %}
@ -1,15 +1,12 @@
 ## Table of content

-- [Requirements](#requirements)
 {% set var = role.var | default({}) %}
 {% if var %}
 - [Default Variables](#default-variables)
-{% if not options.tabulate_vars %}
 {% for key, item in var | dictsort %}
   - [{{ key }}](#{{ key }})
 {% endfor %}
 {% endif %}
-{% endif %}
 {% if tag %}
 - [Discovered Tags](#discovered-tags)
 {% endif %}
@ -4,15 +4,15 @@

 {% for key, item in todo | dictsort %}
 {% for line in item %}
-{% if line.value is defined and line.value | safe_join(" ") | striptags and key == "default" %}
-- {{ line.value | safe_join(" ") | striptags }}
+{% if line.value is defined and line.value | save_join(" ") | striptags and key == "default" %}
+- {{ line.value | save_join(" ") | striptags }}
 {% endif %}
 {% endfor %}
 {% endfor %}
 {% for key, item in todo | dictsort %}
 {% for line in item %}
-{% if line.value is defined and line.value | safe_join(" ") | striptags and key != "default" %}
-- ({{ key }}): {{ line.value | safe_join(" ") | striptags }}
+{% if line.value is defined and line.value | save_join(" ") | striptags and key != "default" %}
+- ({{ key }}): {{ line.value | save_join(" ") | striptags }}
 {% endif %}
 {% endfor %}
 {% endfor %}
@ -1,29 +1,13 @@
 {% set var = role.var | default({}) %}
 {% if var %}

 ## Default Variables
 {% for key, item in var | dictsort %}

 ### {{ key }}
 {% if item.description is defined and item.description %}
-{% set description = [item.description] if item.description is string else item.description %}
-
-{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
+{{ item.description | save_join(" ") }}
-{% endif %}
-{% if item.deprecated is defined or item.type is defined %}
-
-{% if item.deprecated is defined %}
-{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
-{% set deprecated_string = deprecated | map("replace", "\n\n", "\n") | safe_join("\n") %}
-{% if deprecated_string %}
-**_Deprecated:_** {{ deprecated_string }}<br />
-{% else %}
-**_Deprecated_**<br />
-{% endif %}
-{% endif %}
-{% if item.type is defined and item.type %}
-{% set type = [item.type] if item.type is string else item.type %}
-**_Type:_** {{ type | map("replace", "\n\n", "\n") | safe_join("\n") }}<br />
-{% endif %}
 {% endif %}
 {% if item.value is defined and item.value %}

@ -1,49 +0,0 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables

{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key | to_code -}}
|{{ (item.value | default({}))[key] | default | to_code(tab_var=true) -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | to_code(preserve_ms=true) | safe_join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | to_code(tab_var=true,preserve_ms=true) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}
289
ansibledoctor/utils.py
Normal file
@ -0,0 +1,289 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""

import logging
import os
import sys
from distutils.util import strtobool

import colorama
from pythonjsonlogger import jsonlogger

try:
    from typing import Iterable
except ImportError:
    from collections import Iterable

import ansibledoctor.exception

CONSOLE_FORMAT = "{}{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"


def to_bool(string):
    return bool(strtobool(str(string)))


def flatten(items):
    for x in items:
        if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
            for sub_x in flatten(x):
                yield sub_x
        else:
            yield x


def _should_do_markup():
    py_colors = os.environ.get("PY_COLORS", None)
    if py_colors is not None:
        return to_bool(py_colors)

    return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"


colorama.init(autoreset=True, strip=not _should_do_markup())


class Singleton(type):
    """Meta singleton class."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


class LogFilter(object):
    """A custom log filter which excludes log messages above the logged level."""

    def __init__(self, level):
        """
        Initialize a new custom log filter.

        :param level: Log level limit
        :returns: None

        """
        self.__level = level

    def filter(self, logRecord):  # noqa
        # https://docs.python.org/3/library/logging.html#logrecord-attributes
        return logRecord.levelno <= self.__level


class MultilineFormatter(logging.Formatter):
    """Logging Formatter to reset color after newline characters."""

    def format(self, record):  # noqa
        record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
        return logging.Formatter.format(self, record)


class MultilineJsonFormatter(jsonlogger.JsonFormatter):
    """Logging Formatter to remove newline characters."""

    def format(self, record):  # noqa
        record.msg = record.msg.replace("\n", " ")
        return jsonlogger.JsonFormatter.format(self, record)


class Log:
    """Handle logging."""

    def __init__(self, level=logging.WARN, name="ansibledoctor", json=False):
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level)
        self.logger.addHandler(self._get_error_handler(json=json))
        self.logger.addHandler(self._get_warn_handler(json=json))
        self.logger.addHandler(self._get_info_handler(json=json))
        self.logger.addHandler(self._get_critical_handler(json=json))
        self.logger.addHandler(self._get_debug_handler(json=json))
        self.logger.propagate = False

    def _get_error_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.ERROR)
        handler.addFilter(LogFilter(logging.ERROR))
        handler.setFormatter(
            MultilineFormatter(
                self.error(
                    CONSOLE_FORMAT.format(
                        colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
                    )
                )
            )
        )

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_warn_handler(self, json=False):
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.WARN)
        handler.addFilter(LogFilter(logging.WARN))
        handler.setFormatter(
            MultilineFormatter(
                self.warn(
                    CONSOLE_FORMAT.format(
                        colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
                    )
                )
            )
        )

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_info_handler(self, json=False):
        handler = logging.StreamHandler(sys.stdout)
        handler.setLevel(logging.INFO)
        handler.addFilter(LogFilter(logging.INFO))
        handler.setFormatter(
            MultilineFormatter(
                self.info(
                    CONSOLE_FORMAT.format(
                        colorama.Fore.CYAN, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
                    )
                )
            )
        )

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_critical_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.CRITICAL)
        handler.addFilter(LogFilter(logging.CRITICAL))
        handler.setFormatter(
            MultilineFormatter(
                self.critical(
                    CONSOLE_FORMAT.format(
                        colorama.Fore.RED, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
                    )
                )
            )
        )

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def _get_debug_handler(self, json=False):
        handler = logging.StreamHandler(sys.stderr)
        handler.setLevel(logging.DEBUG)
        handler.addFilter(LogFilter(logging.DEBUG))
        handler.setFormatter(
            MultilineFormatter(
                self.critical(
                    CONSOLE_FORMAT.format(
                        colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
                    )
                )
            )
        )

        if json:
            handler.setFormatter(MultilineJsonFormatter(JSON_FORMAT))

        return handler

    def set_level(self, s):
        self.logger.setLevel(s)

    def debug(self, msg):
        """Format info messages and return string."""
        return msg

    def critical(self, msg):
        """Format critical messages and return string."""
        return msg

    def error(self, msg):
        """Format error messages and return string."""
        return msg

    def warn(self, msg):
        """Format warn messages and return string."""
        return msg

    def info(self, msg):
        """Format info messages and return string."""
        return msg

    def _color_text(self, color, msg):
        """
        Colorize strings.

        :param color: colorama color settings
        :param msg: string to colorize
        :returns: string

        """
        return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)

    def sysexit(self, code=1):
        sys.exit(code)

    def sysexit_with_message(self, msg, code=1):
        self.logger.critical(str(msg))
        self.sysexit(code)


class SingleLog(Log, metaclass=Singleton):
    """Singleton logging class."""

    pass


class UnsafeTag:
    """Handle custom yaml unsafe tag."""

    yaml_tag = u"!unsafe"

    def __init__(self, value):
        self.unsafe = value

    @staticmethod
    def yaml_constructor(loader, node):
        return loader.construct_scalar(node)


class FileUtils:
    """Mics static methods for file handling."""

    @staticmethod
    def create_path(path):
        os.makedirs(path, exist_ok=True)

    @staticmethod
    def query_yes_no(question, default=True):
        """Ask a yes/no question via input() and return their answer.

        "question" is a string that is presented to the user.
        "default" is the presumed answer if the user just hits <Enter>.
        It must be "yes" (the default), "no" or None (meaning
        an answer is required of the user).

        The "answer" return value is one of "yes" or "no".
        """
        if default:
            prompt = "[Y/n]"
        else:
            prompt = "[N/y]"

        try:
            # input method is safe in python3
            choice = input("{} {} ".format(question, prompt)) or default  # nosec
            return to_bool(choice)
        except (KeyboardInterrupt, ValueError) as e:
            raise ansibledoctor.exception.InputError("Error while reading input", e)
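In the v1.2.0 `utils.py` added above, every log level gets its own `StreamHandler`, and `LogFilter` keeps each handler from emitting records above its own level. Below is a minimal sketch of that filter pattern using only the standard `logging` module; the class and logger names are illustrative.

```python
import logging
import sys


class MaxLevelFilter(logging.Filter):
    """Let a handler emit only records at or below a given level."""

    def __init__(self, level):
        super().__init__()
        self.level = level

    def filter(self, record):
        return record.levelno <= self.level


logger = logging.getLogger("demo")
logger.setLevel(logging.DEBUG)

# INFO records go to stdout only; anything above is rejected by the filter.
info_handler = logging.StreamHandler(sys.stdout)
info_handler.setLevel(logging.INFO)
info_handler.addFilter(MaxLevelFilter(logging.INFO))

# ERROR and above go to stderr.
error_handler = logging.StreamHandler(sys.stderr)
error_handler.setLevel(logging.ERROR)

logger.addHandler(info_handler)
logger.addHandler(error_handler)

logger.info("goes to stdout only")
logger.error("goes to stderr only")
```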
@ -1,130 +0,0 @@
#!/usr/bin/env python3
"""Global utility methods and classes."""

import os
import sys
from collections.abc import Iterable

import structlog


def strtobool(value):
    """Convert a string representation of truth to true or false."""

    _map = {
        "y": True,
        "yes": True,
        "t": True,
        "true": True,
        "on": True,
        "1": True,
        "n": False,
        "no": False,
        "f": False,
        "false": False,
        "off": False,
        "0": False,
    }

    try:
        return _map[str(value).lower()]
    except KeyError as err:
        raise ValueError(f'"{value}" is not a valid bool value') from err


def to_bool(string):
    return bool(strtobool(str(string)))


def flatten(items):
    for x in items:
        if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
            yield from flatten(x)
        else:
            yield x


def _split_string(string, delimiter, escape, maxsplit=None):
    result = []
    current_element = []
    iterator = iter(string)
    count_split = 0
    skip_split = False

    for character in iterator:
        if maxsplit and count_split >= maxsplit:
            skip_split = True

        if character == escape and not skip_split:
            try:
                next_character = next(iterator)
                if next_character != delimiter and next_character != escape:
                    # Do not copy the escape character if it is intended to escape either the
                    # delimiter or the escape character itself. Copy the escape character
                    # if it is not used to escape either of these characters.
                    current_element.append(escape)
                current_element.append(next_character)
                count_split += 1
            except StopIteration:
                current_element.append(escape)
        elif character == delimiter and not skip_split:
            result.append("".join(current_element))
            current_element = []
            count_split += 1
        else:
            current_element.append(character)

    result.append("".join(current_element))
    return result


def sysexit(code=1):
    sys.exit(code)


def sysexit_with_message(msg, code=1, **kwargs):
    structlog.get_logger().critical(str(msg).strip(), **kwargs)
    sysexit(code)


class Singleton(type):
    """Meta singleton class."""

    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super().__call__(*args, **kwargs)
        return cls._instances[cls]


class FileUtils:
    """Mics static methods for file handling."""

    @staticmethod
    def create_path(path):
        os.makedirs(path, exist_ok=True)

    @staticmethod
    def query_yes_no(question, default=True):
        """
        Ask a yes/no question via input() and return their answer.

        "question" is a string that is presented to the user.
        "default" is the presumed answer if the user just hits <Enter>.
        It must be "yes" (the default), "no" or None (meaning
        an answer is required of the user).

        The "answer" return value is one of "yes" or "no".
        """
        prompt = "[Y/n]" if default else "[N/y]"

        while True:
            try:
                # input method is safe in python3
                choice = input(f"{question} {prompt} ") or default  # nosec
                return to_bool(choice)
            except ValueError:
                print("Invalid input. Please enter 'y' or 'n'.")  # noqa: T201
            except KeyboardInterrupt as e:
                raise e
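The removed main-branch `utils.py` above replaces the deprecated `distutils.util.strtobool` with its own mapping and adds an escape-aware `_split_string` helper. A short usage sketch follows, assuming a checkout where `ansibledoctor.utils` still matches the module shown above.

```python
from ansibledoctor.utils import _split_string, to_bool

print(to_bool("yes"))   # True
print(to_bool("off"))   # False
print(to_bool(1))       # True

try:
    to_bool("maybe")
except ValueError as err:
    print(err)          # '"maybe" is not a valid bool value'

# Backslash-escaped delimiters are kept as part of the element.
print(_split_string(r"first\:item:second", ":", "\\"))  # ['first:item', 'second']
```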
@ -1,78 +0,0 @@
"""Utils for YAML file operations."""

from collections import defaultdict
from contextlib import suppress

import ruamel.yaml
import yaml
from ansible.parsing.yaml.loader import AnsibleLoader

import ansibledoctor.exception


class UnsafeTag:
    """Handle custom yaml unsafe tag."""

    yaml_tag = "!unsafe"

    def __init__(self, value):
        self.unsafe = value

    @staticmethod
    def yaml_constructor(loader, node):
        return loader.construct_scalar(node)


def parse_yaml_ansible(yamlfile):
    try:
        loader = AnsibleLoader(yamlfile)
        data = loader.get_single_data() or []
    except (
        yaml.parser.ParserError,
        yaml.scanner.ScannerError,
        yaml.constructor.ConstructorError,
        yaml.composer.ComposerError,
    ) as e:
        raise ansibledoctor.exception.YAMLError(e) from e

    return data


def parse_yaml(yamlfile):
    try:
        ruamel.yaml.add_constructor(
            UnsafeTag.yaml_tag,
            UnsafeTag.yaml_constructor,
            constructor=ruamel.yaml.SafeConstructor,
        )

        data = ruamel.yaml.YAML(typ="rt").load(yamlfile)
        _yaml_remove_comments(data)
        data = defaultdict(dict, data or {})
    except (
        ruamel.yaml.parser.ParserError,
        ruamel.yaml.scanner.ScannerError,
        ruamel.yaml.constructor.ConstructorError,
        ruamel.yaml.composer.ComposerError,
    ) as e:
        raise ansibledoctor.exception.YAMLError(e) from e

    return data


def _yaml_remove_comments(d):
    if isinstance(d, dict):
        for k, v in d.items():
            _yaml_remove_comments(k)
            _yaml_remove_comments(v)
    elif isinstance(d, list):
        for elem in d:
            _yaml_remove_comments(elem)

    with suppress(AttributeError):
        attr = (
            "comment"
            if isinstance(d, ruamel.yaml.scalarstring.ScalarString)
            else ruamel.yaml.comments.Comment.attrib
        )
        delattr(d, attr)
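The removed YAML helper above registers a constructor for Ansible's `!unsafe` tag so tagged values load as plain scalars. Here is a minimal sketch of the same idea using plain PyYAML; it illustrates only the tag handling and does not mirror the module's ruamel.yaml/AnsibleLoader setup.

```python
import yaml


def unsafe_constructor(loader, node):
    # Treat `!unsafe` values as ordinary strings instead of failing the load.
    return loader.construct_scalar(node)


yaml.SafeLoader.add_constructor("!unsafe", unsafe_constructor)

document = """
docker_registry_password: !unsafe "{{ not_a_template }}"
"""

print(yaml.load(document, Loader=yaml.SafeLoader))
# {'docker_registry_password': '{{ not_a_template }}'}
```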
@ -1,4 +1,4 @@
-FROM python:3.13-alpine@sha256:fcbcbbecdeae71d3b77445d9144d1914df55110f825ab62b04a66c7c33c09373
+FROM python:3.10-alpine@sha256:a9865ba6472324621e81e1da5cbd02069d528215a4b49d49695eac693c10488a

 LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
 LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
@ -12,9 +12,9 @@ ENV TZ=UTC

 ADD dist/ansible_doctor-*.whl /

-RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev git && \
+RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
     pip install --upgrade --no-cache-dir pip && \
-    pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl")[ansible-core] && \
+    pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
     rm -f ansible_doctor-*.whl && \
     rm -rf /var/cache/apk/* && \
     rm -rf /root/.cache/
24
docker/Dockerfile.arm
Normal file
@ -0,0 +1,24 @@
FROM arm32v7/python:3.10-alpine@sha256:9871be3035a0832069bdb8fb5c486025af6d0c0a77e1111bbc1807f638a5e2b6

LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-doctor"
LABEL org.opencontainers.image.url="https://ansible-doctor.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-doctor"
LABEL org.opencontainers.image.documentation="https://ansible-doctor.geekdocs.de/"

ENV PY_COLORS=1
ENV TZ=UTC

ADD dist/ansible_doctor-*.whl /

RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
    pip install --upgrade --no-cache-dir pip && \
    pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
    rm -f ansible_doctor-*.whl && \
    rm -rf /var/cache/apk/* && \
    rm -rf /root/.cache/

USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-doctor"]
24
docker/Dockerfile.arm64
Normal file
@ -0,0 +1,24 @@
FROM arm64v8/python:3.10-alpine@sha256:5fae85eea1d49fa7e0b60ab7091922329d1f573a3359f9dc438ac0614169c22a

LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-doctor"
LABEL org.opencontainers.image.url="https://ansible-doctor.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-doctor"
LABEL org.opencontainers.image.documentation="https://ansible-doctor.geekdocs.de/"

ENV PY_COLORS=1
ENV TZ=UTC

ADD dist/ansible_doctor-*.whl /

RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
    pip install --upgrade --no-cache-dir pip && \
    pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
    rm -f ansible_doctor-*.whl && \
    rm -rf /var/cache/apk/* && \
    rm -rf /root/.cache/

USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-doctor"]
24
docker/manifest-quay.tmpl
Normal file
@ -0,0 +1,24 @@
image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
  - {{this}}
{{/each}}
{{/if}}
manifests:
  - image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
    platform:
      architecture: amd64
      os: linux

  - image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
    platform:
      architecture: arm64
      os: linux
      variant: v8

  - image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
    platform:
      architecture: arm
      os: linux
      variant: v7
24
docker/manifest.tmpl
Normal file
@ -0,0 +1,24 @@
image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
  - {{this}}
{{/each}}
{{/if}}
manifests:
  - image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
    platform:
      architecture: amd64
      os: linux

  - image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
    platform:
      architecture: arm64
      os: linux
      variant: v8

  - image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
    platform:
      architecture: arm
      os: linux
      variant: v7
@ -2,7 +2,7 @@
 title: Documentation
 ---

-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
+[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
 [![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-doctor)
 [![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-doctor)
 [![Python Version](https://img.shields.io/pypi/pyversions/ansible-doctor.svg)](https://pypi.org/project/ansible-doctor/)
@ -4,9 +4,10 @@ title: Using docker

 ```Shell
 docker run \
-  -e ANSIBLE_DOCTOR_BASE_DIR=example/demo-role/ \
+  -e ANSIBLE_DOCTOR_ROLE_DIR=example/demo-role/ \
+  -e ANSIBLE_DOCTOR_OUTPUT_DIR=example/ \
   -e ANSIBLE_DOCTOR_FORCE_OVERWRITE=true \
-  -e ANSIBLE_DOCTOR_CUSTOM_HEADER=HEADER.md \
+  -e ANSIBLE_DOCTOR_CUSTOM_HEADER=example/demo-role/HEADER.md \
   -e ANSIBLE_DOCTOR_LOG_LEVEL=info \
   -e PY_COLORS=1 \
   -v $(pwd):/doctor \
@ -14,6 +15,7 @@
 thegeeklab/ansible-doctor
 ```

-{{< hint type=note >}}
+{{< hint info >}}
+**Info**\
 Keep in mind, that SELinux labels (`:Z` or `:z`) need to be passed as mount option on SELinux enabled systems.
 {{< /hint >}}
@ -4,12 +4,11 @@ title: Using pip

 ```Shell
 # From PyPI as unprivileged user
-$ pip install ansible-doctor[ansible-core] --user
+$ pip install ansible-doctor --user

 # .. or as root
-$ sudo pip install ansible-doctor[ansible-core]
+$ sudo pip install ansible-doctor

 # From Wheel file
-# Please check first whether a newer version is available.
-$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v3.1.4/ansible_doctor-3.1.4-py2.py3-none-any.whl[ansible-core]
+$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v0.1.1/ansible_doctor-0.1.1-py2.py3-none-any.whl
 ```
@ -18,104 +18,59 @@ Configuration options can be set in different places, which are processed in the

 ```YAML
 ---
-# Default is the current working directory.
-base_dir:
-
-role:
-  # Default is the basename of 'role_name'.
-  name:
-  # Auto-detect if the given directory is a role, can be disabled
-  # to parse loose files instead.
-  autodetect: True
-
-# Don't write anything to file system.
-dry_run: False
+# default is the current working directory
+role_dir:
+# default is the basename of 'role_name'
+role_name:
+# Auto-detect if the given directory is a role, can be disabled
+# to parse loose files instead.
+role_detection: True
+# don't write anything to file system
+dry_run: False
+
+logging:
+  # possible options debug | info | warning | error | critical
+  level: "warning"
+  # json logging can be enabled if a parsable output is required
+  json: False
+
+# path to write rendered template file
+# default is the current working directory
+output_dir:
+# default is in-build templates directory
+template_dir:
+template: readme
+
+# don't ask to overwrite if output file exists
+force_overwrite: False
+# load custom header from given file and append template output
+# to it before write.
+custom_header: ""

 exclude_files: []
 # Examples
 # exclude_files:
 #   - molecule/
 #   - files/**/*.py

-# Exclude tags from automatic detection. Configured tags are only skipped
-# if the tag is not used in an annotation.
-exclude_tags: []
-
-logging:
-  # Possible options: debug|info|warning| error|critical
-  level: "warning"
-  # JSON logging can be enabled if a parsable output is required.
-  json: False
-
-template:
-  # Name of the template to be used. In most cases, this is the name of a directory that is attached to the
-  # the `src` path or Git repo (see example below).
-  name: readme
-
-  # Template provider source. Currently supported providers are `local|git`.
-  # The `local` provider loads templates from the local file system. This provider
-  # is used by default and uses the built-in templates.
-  #
-  # Examples:
-  # template:
-  #   name: readme
-  #   src: local>/tmp/custom_templates/
-  #
-  # The `git` provider allows templates to be loaded from a git repository. At the moment
-  # the functions of this provider are limited and only public repositories are supported.
-  #
-  # Examples:
-  # template:
-  #   src: git>https://github.com/thegeeklab/ansible-doctor
-  #   name: ansibledoctor/templates/readme
-  #
-  # template:
-  #   src: git>git@github.com:thegeeklab/ansible-doctor.git
-  #   name: ansibledoctor/templates/readme
-  #
-  # template:
-  #   src: git>git@github.com:thegeeklab/ansible-doctor.git#branch-or-tag
-  #   name: ansibledoctor/templates/readme
-  src:
-
-  options:
-    # Configures whether to tabulate variables in the output. When set to `True`,
-    # variables will be displayed in a tabular format intsead of plain marktdown sections.
-    # NOTE: This option does not support rendering multiline code blocks.
-    tabulate_vars: False
-
-renderer:
-  # By default, double spaces, spaces before and after line breaks or tab characters, etc.
-  # are automatically removed before the template is rendered. As a result, indenting
-  # with spaces does not work. If you want to use spaces to indent text, you must disable
-  # this option.
-  autotrim: True
-  # Load custom header from given file and append template output to it before write.
-  include_header: ""
-  # Path to write rendered template file. Default is the current working directory.
-  dest:
-  # Don't ask to overwrite if output file exists.
-  force_overwrite: False
 ```

 ## CLI

 ```Shell
 $ ansible-doctor --help
-usage: ansible-doctor [-h] [-c CONFIG_FILE] [-o OUTPUT_DIR] [-r] [-f] [-d] [-n] [-v] [-q] [--version] [base_dir]
+usage: ansible-doctor [-h] [-c CONFIG_FILE] [-o OUTPUT_DIR] [-f] [-d] [-n] [-v] [-q] [--version] [role_dir]

 Generate documentation from annotated Ansible roles using templates

 positional arguments:
-  base_dir              base directory (default: current working directory)
+  role_dir              role directory (default: current working dir)

-options:
+optional arguments:
   -h, --help            show this help message and exit
   -c CONFIG_FILE, --config CONFIG_FILE
-                        path to configuration file
+                        location of configuration file
   -o OUTPUT_DIR, --output OUTPUT_DIR
-                        output directory
+                        output base dir
-  -r, --recursive       run recursively over the base directory subfolders
   -f, --force           force overwrite output file
   -d, --dry-run         dry run without writing
   -n, --no-role-detection
@ -127,48 +82,19 @@ options:

 ## Environment Variables

-{{< hint type=note >}}
-List configuration options need to be passed as JSON strings.
-{{< /hint >}}
-
 ```Shell
-ANSIBLE_DOCTOR_BASE_DIR=
-ANSIBLE_DOCTOR_DRY_RUN=False
-ANSIBLE_DOCTOR_EXCLUDE_FILES="['molecule/']"
-ANSIBLE_DOCTOR_EXCLUDE_TAGS="[]"
-ANSIBLE_DOCTOR_ROLE__NAME=
-ANSIBLE_DOCTOR_ROLE__AUTODETECT=True
-ANSIBLE_DOCTOR_LOGGING__LEVEL="warning"
-ANSIBLE_DOCTOR_LOGGING__JSON=False
-ANSIBLE_DOCTOR_TEMPLATE__NAME=readme
-ANSIBLE_DOCTOR_TEMPLATE__SRC=
-ANSIBLE_DOCTOR_TEMPLATE__OPTIONS__TABULATE_VARS=False
-ANSIBLE_DOCTOR_RENDERER__AUTOTRIM=True
-ANSIBLE_DOCTOR_RENDERER__INCLUDE_HEADER=
-ANSIBLE_DOCTOR_RENDERER__DEST=
-ANSIBLE_DOCTOR_RENDERER__FORCE_OVERWRITE=False
+ANSIBLE_DOCTOR_CONFIG_FILE=
+ANSIBLE_DOCTOR_ROLE_DETECTION=true
+ANSIBLE_DOCTOR_ROLE_DIR=
+ANSIBLE_DOCTOR_ROLE_NAME=
+ANSIBLE_DOCTOR_DRY_RUN=false
+ANSIBLE_DOCTOR_LOG_LEVEL=warning
+ANSIBLE_DOCTOR_LOG_JSON=false
+ANSIBLE_DOCTOR_OUTPUT_DIR=
+ANSIBLE_DOCTOR_TEMPLATE_DIR=
+ANSIBLE_DOCTOR_TEMPLATE=readme
+ANSIBLE_DOCTOR_FORCE_OVERWRITE=false
+ANSIBLE_DOCTOR_CUSTOM_HEADER=
+ANSIBLE_DOCTOR_EXCLUDE_FILES=
+ANSIBLE_DOCTOR_EXCLUDE_FILES=molecule/,files/**/*.py
 ```

-## Pre-Commit setup
-
-To use _ansible-doctor_ with the [pre-commit](https://pre-commit.com/) framework, add the following to the `.pre-commit-config.yaml` file in your local repository.
-
-<!-- prettier-ignore-start -->
-<!-- markdownlint-disable -->
-<!-- spellchecker-disable -->
-
-{{< highlight yaml "linenos=table" >}}
-- repo: https://github.com/thegeeklab/ansible-doctor
-  # update version with `pre-commit autoupdate`
-  rev: v4.0.4
-  hooks:
-    - id: ansible-doctor
-{{< /highlight >}}
-
-<!-- spellchecker-enable -->
-<!-- markdownlint-restore -->
-<!-- prettier-ignore-end -->
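The main-branch documentation above notes that list options have to be passed as JSON strings when configured through environment variables. Below is a small sketch of how such a value can be decoded; it is illustrative only and not ansible-doctor's actual configuration loader.

```python
import json
import os

# Hypothetical shell setup:
#   export ANSIBLE_DOCTOR_EXCLUDE_FILES='["molecule/", "files/**/*.py"]'
os.environ.setdefault("ANSIBLE_DOCTOR_EXCLUDE_FILES", '["molecule/", "files/**/*.py"]')

exclude_files = json.loads(os.environ["ANSIBLE_DOCTOR_EXCLUDE_FILES"])
print(exclude_files)  # ['molecule/', 'files/**/*.py']
```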
@ -12,7 +12,7 @@ ansible-doctor FOLDER

 If no folder is passed to _ansible-doctor_, the current working directory is used. The first step is to determine if the specified folder is an Ansible role. This check is very simple and only verifies if there is a sub-directory named `tasks` in the specified folder. After a successful check, _ansible-doctor_ registers all files of the role to search them for annotations.

-Without any further work _ansible-doctor_ can already create a documentation of the available variables and some meta information if the role contains. This basic information can be extended with a set of available annotations. If you want to see it in action you can find a [demo role](https://github.com/thegeeklab/ansible-doctor/tree/main/example) with a lot of examples in the repository.
+Without any further work _ansible-doctor_ can already create a documentation of the available variables and some meta information if the role contains [meta information](https://galaxy.ansible.com/docs/contributing/creating_role.html#role-metadata). This basic information can be extended with a set of available annotations.

 ## Annotations

@ -33,7 +33,7 @@ option1
 # the default description with an annotation.
 # @end

-# @meta author:value: [John Doe](https://blog.example.com)
+# @meta author: [John Doe](https://blog.example.com)
 ```

 ### `@var`
@ -44,28 +44,16 @@ option1
|
|||||||
: the name of the variable to which additional information should be added
|
: the name of the variable to which additional information should be added
|
||||||
|
|
||||||
option2
|
option2
|
||||||
: supports `["value", "example", "description", "type", "deprecated"]` as information scopes
|
: supports `["value", "example", "description"]` as information scopes
|
||||||
|
|
||||||
#### `value`
|
**Example:**
|
||||||
|
|
||||||
```yaml
|
```YAML
|
||||||
# @var docker_registry_password:value: $ "secret"
|
# @var docker_registry_password:value: "secure_overwrite"
|
||||||
docker_registry_password: "secret"
|
# @var docker_registry_password: "secure_overwrite"
|
||||||
```
|
|
||||||
|
|
||||||
#### `example`
|
# @var docker_registry_password:example: "%8gv_5GA?"
|
||||||
|
|
||||||
```yaml
|
|
||||||
# @var docker_registry_password:example: $ "randomPassw0rd"
|
|
||||||
# @var docker_registry_password:example: >
|
|
||||||
# docker_registry_password: "randomPassw0rd"
|
|
||||||
# @end
|
|
||||||
docker_registry_password: "secret"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `description`
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# @var docker_registry_password:description: Very secure password to login to the docker registry.
|
# @var docker_registry_password:description: Very secure password to login to the docker registry.
|
||||||
# @var docker_registry_password:description: >
|
# @var docker_registry_password:description: >
|
||||||
# Multi line description are possible as well.
|
# Multi line description are possible as well.
|
||||||
@ -74,21 +62,6 @@ docker_registry_password: "secret"
|
|||||||
docker_registry_password: "secret"
|
docker_registry_password: "secret"
|
||||||
```
|
```
|
||||||
|
|
||||||
#### `type`
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# @var docker_registry_password:type: string
|
|
||||||
docker_registry_password: "secret"
|
|
||||||
```
|
|
||||||
|
|
||||||
#### `deprecated`
|
|
||||||
|
|
||||||
```yaml
|
|
||||||
# @var docker_registry_password:deprecated: true
|
|
||||||
# @var docker_registry_password:deprecated: since v1.0.0
|
|
||||||
docker_registry_password: "secret"
|
|
||||||
```
|
|
||||||
|
|
||||||
### `@tag`
|
### `@tag`
|
||||||
|
|
||||||
Used tags within the Ansible task files will be auto-discovered. This identifier can be used to define tags manually or add extended information to discovered tags.
|
Used tags within the Ansible task files will be auto-discovered. This identifier can be used to define tags manually or add extended information to discovered tags.
|
||||||
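Read together, the left-hand side of this hunk documents five information scopes for `@var` (`value`, `example`, `description`, `type`, `deprecated`). They can be combined on a single variable; the following sketch only restates the syntax shown above and uses a made-up variable name for illustration:

```yaml
# @var smtp_relay_host:description: Mail relay used by the role.
# @var smtp_relay_host:type: string
# @var smtp_relay_host:example: $ "mail.example.com"
# @var smtp_relay_host:deprecated: since v2.0.0
smtp_relay_host: "localhost"
```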
162
docs/static/socialmedia.svg
vendored
File diff suppressed because one or more lines are too long (Before: 20 KiB)
BIN
docs/static/socialmedia2.png
vendored
Binary file not shown. (Before: 30 KiB, After: 37 KiB)
@@ -1,24 +1,17 @@
 # demo-role-custom-header
 
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
+[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
 [![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
 
-Role to demonstrate ansible-doctor. It is also possible to overwrite
-the default description with an annotation.
+Role to demonstrate ansible-doctor. It is also possible to overwrite the default description with an annotation.
 
 ## Table of content
 
-- [Requirements](#requirements)
 - [Default Variables](#default-variables)
-  - [demo_bool](#demo_bool)
-  - [demo_role_deprecated](#demo_role_deprecated)
-  - [demo_role_deprecated_info](#demo_role_deprecated_info)
   - [demo_role_dict](#demo_role_dict)
   - [demo_role_empty](#demo_role_empty)
   - [demo_role_empty_dict](#demo_role_empty_dict)
   - [demo_role_other_tags](#demo_role_other_tags)
-  - [demo_role_override](#demo_role_override)
-  - [demo_role_override_complex](#demo_role_override_complex)
   - [demo_role_single](#demo_role_single)
   - [demo_role_undefined_var](#demo_role_undefined_var)
   - [demo_role_unset](#demo_role_unset)
@@ -30,47 +23,8 @@ the default description with an annotation.
 
 ---
 
-## Requirements
-
-- Minimum Ansible version: `2.10`
-
 ## Default Variables
 
-### demo_bool
-
-#### Default value
-
-```YAML
-demo_bool: true
-```
-
-#### Example usage
-
-```YAML
-demo_bool: false
-```
-
-### demo_role_deprecated
-
-**_Deprecated_**<br />
-
-#### Default value
-
-```YAML
-demo_role_deprecated: b
-```
-
-### demo_role_deprecated_info
-
-**_Deprecated:_** This variable is deprected since `v2.0.0` and will be removed in a future release.<br />
-**_Type:_** string<br />
-
-#### Default value
-
-```YAML
-demo_role_deprecated_info: a
-```
-
 ### demo_role_dict
 
 #### Default value
@@ -105,8 +59,7 @@ demo_role_empty: ''
 
 ### demo_role_empty_dict
 
-... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key
-and filters like `to_nice_yaml` can be used in templates. To get it working, the json need to be prefixed with a `$`.
+... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key and filters like `to_nice_yaml` can be used in templates. To get it working, the json need to be prefixed with a `$`.
 
 #### Default value
 
@@ -144,24 +97,6 @@ demo_role_other_tags:
   - package2
 ```
 
-### demo_role_override
-
-#### Default value
-
-```YAML
-demo_role_override: test
-```
-
-### demo_role_override_complex
-
-#### Default value
-
-```YAML
-demo_role_override_complex:
-  foo: bar
-  second: value
-```
-
 ### demo_role_single
 
 #### Default value
@@ -172,12 +107,7 @@ demo_role_single: b
 
 ### demo_role_undefined_var
 
-To highlight a variable that has not set a value by default, this is one way to achieve it.
-Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
-
-| Attribute | Description |
-| --- | --- |
-| value1 | desc1 |
+To highlight a variable that has not set a value by default, this is one way to achieve it. Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
 
 #### Default value
 
@@ -219,7 +149,7 @@ demo_role_unset: some_value
 
 ## Dependencies
 
-- role2
+None.
 
 ## License
 
@@ -227,4 +157,4 @@ MIT
 
 ## Author
 
-[John Doe](https://blog.example.com)
+John Doe
@@ -1,10 +1,5 @@
 ---
+custom_header: HEADER.md
 logging:
   level: debug
-
-template:
-  src: git>https://github.com/thegeeklab/ansible-doctor
-  name: ansibledoctor/templates/readme
-
-renderer:
-  include_header: HEADER.md
+template: readme
@@ -1,4 +1,4 @@
 # demo-role-custom-header
 
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
+[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
 [![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
@@ -6,9 +6,6 @@ demo_role_unset:
 demo_role_empty: ""
 demo_role_single: "b"
 
-# @var demo_bool:example: $ false
-demo_bool: true
-
 # @var demo_role_empty_dict:description: >
 # ... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key
 # and filters like `to_nice_yaml` can be used in templates. To get it working, the json need to be prefixed with a `$`.
@@ -34,11 +31,6 @@ demo_role_dict:
 # @var demo_role_undefined_var:description: >
 # To highlight a variable that has not set a value by default, this is one way to achieve it.
 # Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
-#
-# | Attribute | Description |
-# | --- | --- |
-# | value1 | desc1 |
-#
 # @end
 # @var demo_role_undefined_var: $ "_unset_"
 
@@ -52,19 +44,3 @@ demo_role_dict:
 # ]
 # @end
 demo_role_other_tags: []
-
-## Simple value
-# @var demo_role_override: $ "test"
-demo_role_override: original
-
-## Complex value
-# @var demo_role_override_complex:value: $ {"foo":"bar", "second":"value"}
-demo_role_override_complex: {}
-
-# @var demo_role_deprecated:deprecated:
-demo_role_deprecated: "b"
-
-# @var demo_role_deprecated_info:deprecated: >
-# This variable is deprected since `v2.0.0` and will be removed in a future release.
-# @var demo_role_deprecated_info:type: string
-demo_role_deprecated_info: "a"
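All of the override annotations removed in the last hunk rely on the `$` marker, which flags the annotation value as JSON so it replaces the discovered default in the generated documentation. A minimal sketch of the same pattern with an illustrative variable name:

```yaml
# @var ntp_servers:description: List of NTP servers used by the role.
# @var ntp_servers:value: $ ["0.pool.ntp.org", "1.pool.ntp.org"]
ntp_servers: []
```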
@@ -3,20 +3,17 @@
 # Role to demonstrate ansible-doctor. It is also possible to overwrite
 # the default description with an annotation.
 # @end
-# @meta author: [John Doe](https\://blog.example.com)
+# @meta author: [John Doe](https://blog.example.com)
 galaxy_info:
   description: Role to demonstrate ansible-doctor.
   author: John Doe
   license: MIT
-  min_ansible_version: "2.10"
+  min_ansible_version: 2.4
   platforms:
     - name: EL
       versions:
-        - "9"
+        - 7
   galaxy_tags:
     - demo
     - documentation
-
-dependencies:
-  - role: role2
-  - name: namespace.role3
+dependencies: []
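Apart from the version bumps, the notable change in this hunk is the escaped colon in the `@meta` value: on the main branch a literal `:` inside an annotation value (here, inside the URL) is written as `\:`, presumably so it is not parsed as the annotation's own key/scope separator. A small sketch of the same pattern with placeholder values:

```yaml
# @meta author: [Jane Doe](https\://example.org)
galaxy_info:
  author: Jane Doe
```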
@@ -11,8 +11,6 @@
 - name: Demo task with a tag list
   debug:
     msg: "Demo message"
-    tags:
-      - module-tag
   tags:
     - role-tag1
     - role-tag2
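Task files are also where `@todo` annotations live (the second example role removed further down in this compare uses them); discovered task tags and todos feed the `Discovered Tags` and `Open Tasks` sections of the generated README. A short sketch with an illustrative task:

```yaml
# @todo improvement: Some things that need to be improved.

- name: Demo task with a single tag
  debug:
    msg: "Demo message"
  tags:
    - single-tag
```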
@@ -1,9 +0,0 @@
----
-logging:
-  level: debug
-
-template:
-  name: readme
-
-renderer:
-  include_header: HEADER.md
@@ -1,4 +0,0 @@
-# other-role-custom-header
-
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
-[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
@@ -1,242 +0,0 @@
-# other-role-custom-header
-
-[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
-[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
-
-Role to demonstrate ansible-doctor.
-
-## Table of content
-
-- [Requirements](#requirements)
-- [Default Variables](#default-variables)
-  - [other_role_bool](#other_role_bool)
-  - [other_role_deprecated](#other_role_deprecated)
-  - [other_role_deprecated_info](#other_role_deprecated_info)
-  - [other_role_dict](#other_role_dict)
-  - [other_role_empty](#other_role_empty)
-  - [other_role_empty_dict](#other_role_empty_dict)
-  - [other_role_multiline_type](#other_role_multiline_type)
-  - [other_role_other_tags](#other_role_other_tags)
-  - [other_role_override](#other_role_override)
-  - [other_role_override_complex](#other_role_override_complex)
-  - [other_role_single](#other_role_single)
-  - [other_role_undefined_var](#other_role_undefined_var)
-  - [other_role_unset](#other_role_unset)
-- [Discovered Tags](#discovered-tags)
-- [Open Tasks](#open-tasks)
-- [Dependencies](#dependencies)
-- [License](#license)
-- [Author](#author)
-
----
-
-## Requirements
-
-- Minimum Ansible version: `2.10`
-
-## Default Variables
-
-### other_role_bool
-
-#### Default value
-
-```YAML
-other_role_bool: true
-```
-
-#### Example usage
-
-```YAML
-other_role_bool: false
-```
-
-### other_role_deprecated
-
-**_Deprecated_**<br />
-
-#### Default value
-
-```YAML
-other_role_deprecated: b
-```
-
-### other_role_deprecated_info
-
-**_Deprecated:_** This variable is deprected since `v2.0.0` and will be removed in a future release.<br />
-**_Type:_** string<br />
-
-#### Default value
-
-```YAML
-other_role_deprecated_info: a
-```
-
-### other_role_dict
-
-#### Default value
-
-```YAML
-other_role_dict:
-  key1:
-    sub: some value
-```
-
-#### Example usage
-
-```YAML
-other_role_dict:
-  key1:
-    sub: some value
-
-  # Inline description
-  key2:
-    sublist:
-      - subval1
-      - subval2
-```
-
-### other_role_empty
-
-#### Default value
-
-```YAML
-other_role_empty: ''
-```
-
-### other_role_empty_dict
-
-... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key
-and filters like `to_nice_yaml` can be used in templates. To get it working, the json need to be prefixed with a `$`.
-
-#### Default value
-
-```YAML
-other_role_empty_dict: {}
-```
-
-#### Example usage
-
-```YAML
-other_role_empty_dict:
-  key1:
-    sub: some value
-  key2:
-    sublist:
-      - subval1
-      - subval2
-```
-
-### other_role_multiline_type
-
-**_Type:_** string
-list
-dict<br />
-
-#### Default value
-
-```YAML
-other_role_multiline_type: a
-```
-
-### other_role_other_tags
-
-If a variable need some more explanation, this is a good place to do so.
-
-#### Default value
-
-```YAML
-other_role_other_tags:
-  - package1
-  - package2
-```
-
-#### Example usage
-
-```YAML
-other_role_other_tags:
-  - package1
-  - package2
-```
-
-### other_role_override
-
-#### Default value
-
-```YAML
-other_role_override: test
-```
-
-### other_role_override_complex
-
-#### Default value
-
-```YAML
-other_role_override_complex:
-  foo: bar
-  second: value
-```
-
-### other_role_single
-
-#### Default value
-
-```YAML
-other_role_single: b
-```
-
-### other_role_undefined_var
-
-To highlight a variable that has not set a value by default, this is one way to achieve it.
-Make sure to flag it as json value: `@var other_role_undefined_var: $ "_unset_"`
-
-| Attribute | Description |
-| --- | --- |
-| value1 | desc1 |
-
-#### Default value
-
-```YAML
-other_role_undefined_var: _unset_
-```
-
-### other_role_unset
-
-Values can be plain strings, but there is no magic or autoformatting...
-
-#### Default value
-
-```YAML
-other_role_unset:
-```
-
-#### Example usage
-
-```YAML
-other_role_unset: some_value
-```
-
-## Discovered Tags
-
-**_role-tag1_**
-
-**_role-tag2_**
-
-## Open Tasks
-
-- Unscoped general todo.
-- (bug): Some bug that is known and need to be fixed.
-- (bug): Multi line description are possible as well. Some bug that is known and need to be fixed.
-- (improvement): Some things that need to be improved.
-
-## Dependencies
-
-- role1
-- role2
-
-## License
-
-MIT
-
-## Author
-
-[John Doe](https://blog.example.com)
@@ -1,78 +0,0 @@
----
-# @var other_role_unset:description: Values can be plain strings, but there is no magic or autoformatting...
-# @var other_role_unset:example: other_role_unset: some_value
-other_role_unset:
-
-other_role_empty: ""
-other_role_single: "b"
-
-# @var other_role_bool:example: $ false
-other_role_bool: True
-
-# @var other_role_empty_dict:description: >
-# ... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key
-# and filters like `to_nice_yaml` can be used in templates. To get it working, the json need to be prefixed with a `$`.
-# @end
-# @var other_role_empty_dict:example: $ {"key1": {"sub": "some value"}, "key2": {"sublist": ["subval1", "subval2"]}}
-other_role_empty_dict: {}
-
-# @var other_role_dict:example: >
-# other_role_dict:
-#   key1:
-#     sub: some value
-#
-#   # Inline description
-#   key2:
-#     sublist:
-#       - subval1
-#       - subval2
-# @end
-other_role_dict:
-  key1:
-    sub: some value
-
-# @var other_role_undefined_var:description: >
-# To highlight a variable that has not set a value by default, this is one way to achieve it.
-# Make sure to flag it as json value: `@var other_role_undefined_var: $ "_unset_"`
-#
-# | Attribute | Description |
-# | --- | --- |
-# | value1 | desc1 |
-#
-# @end
-# @var other_role_undefined_var: $ "_unset_"
-
-# @var other_role_other_tags:description: >
-# If a variable need some more explanation, this is a good place to do so.
-# @end
-# @var other_role_other_tags:example: $>
-# [
-#   "package1",
-#   "package2"
-# ]
-# @end
-# @var other_role_other_tags:value: $ ["package1", "package2"]
-other_role_other_tags: []
-
-## Simple value
-# @var other_role_override: $ "test"
-other_role_override: original
-
-## Complex value
-# @var other_role_override_complex:value: $ {"foo":"bar", "second":"value"}
-other_role_override_complex: {}
-
-# @var other_role_deprecated:deprecated:
-other_role_deprecated: "b"
-
-# @var other_role_deprecated_info:deprecated: >
-# This variable is deprected since `v2.0.0` and will be removed in a future release.
-# @var other_role_deprecated_info:type: string
-other_role_deprecated_info: "a"
-
-# @var other_role_multiline_type:type: >
-# string
-# list
-# dict
-# @end
-other_role_multiline_type: "a"
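One pattern in these removed defaults that the first example role does not use is the multi-line `type` annotation closed with `@end`; a compact sketch of the same construct with an illustrative variable name:

```yaml
# @var listen_address:type: >
#   string
#   list
# @end
listen_address: "127.0.0.1"
```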
@@ -1,19 +0,0 @@
----
-# @meta author: [John Doe](https\://blog.example.com)
-galaxy_info:
-  description: Role to demonstrate ansible-doctor.
-  author: John Doe
-  license: MIT
-  min_ansible_version: "2.10"
-  platforms:
-    - name: EL
-      versions:
-        - "9"
-  galaxy_tags:
-    - demo
-    - documentation
-
-dependencies:
-  - role1
-  - role: role2
-  - name: namespace.role3
@@ -1,16 +0,0 @@
----
-# @todo bug: Some bug that is known and need to be fixed.
-# @todo bug: >
-# Multi line description are possible as well.
-# Some bug that is known and need to be fixed.
-# @end
-
-# @todo improvement: Some things that need to be improved.
-# @todo default: Unscoped general todo.
-
-- name: Demo task with a tag list
-  debug:
-    msg: "Demo message"
-  tags:
-    - role-tag1
-    - role-tag2
1529
poetry.lock
generated
File diff suppressed because it is too large
142
pyproject.toml
@@ -10,10 +10,10 @@ classifiers = [
   "Natural Language :: English",
   "Operating System :: POSIX",
   "Programming Language :: Python :: 3",
+  "Programming Language :: Python :: 3.7",
+  "Programming Language :: Python :: 3.8",
+  "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
-  "Programming Language :: Python :: 3.11",
-  "Programming Language :: Python :: 3.12",
-  "Programming Language :: Python :: 3.13",
   "Topic :: Utilities",
   "Topic :: Software Development",
   "Topic :: Software Development :: Documentation",
@@ -21,53 +21,72 @@ classifiers = [
 description = "Generate documentation from annotated Ansible roles using templates."
 documentation = "https://ansible-doctor.geekdocs.de/"
 homepage = "https://ansible-doctor.geekdocs.de/"
-include = ["LICENSE"]
+include = [
+  "LICENSE",
+]
 keywords = ["ansible", "role", "documentation"]
 license = "GPL-3.0-only"
 name = "ansible-doctor"
-packages = [{ include = "ansibledoctor" }]
+packages = [
+  {include = "ansibledoctor"},
+]
 readme = "README.md"
 repository = "https://github.com/thegeeklab/ansible-doctor/"
 version = "0.0.0"
 
 [tool.poetry.dependencies]
-Jinja2 = "3.1.4"
-anyconfig = "0.14.0"
+Jinja2 = "3.0.3"
+anyconfig = "0.12.0"
 appdirs = "1.4.4"
-colorama = "0.4.6"
-pathspec = "0.12.1"
-python = "^3.10.0"
-"ruamel.yaml" = "0.18.6"
-dynaconf = "3.2.6"
-gitpython = "3.1.43"
-ansible-core = { version = "2.14.18", optional = true }
-structlog = "24.4.0"
+colorama = "0.4.4"
+environs = "9.5.0"
+jsonschema = "4.4.0"
+nested-lookup = "0.2.23"
+pathspec = "0.9.0"
+python = "^3.7.0"
+python-json-logger = "2.0.2"
+"ruamel.yaml" = "0.17.21"
 
-[tool.poetry.extras]
-ansible-core = ["ansible-core"]
+[tool.poetry.dev-dependencies]
+bandit = "1.7.2"
+flake8 = "4.0.1"
+flake8-blind-except = "0.2.0"
+flake8-builtins = "1.5.3"
+flake8-docstrings = "1.6.0"
+flake8-eradicate = "1.2.0"
+flake8-isort = "4.1.1"
+flake8-logging-format = "0.6.0"
+flake8-pep3101 = "1.3.0"
+flake8-polyfill = "1.0.2"
+flake8-quotes = "3.3.1"
+pep8-naming = "0.12.1"
+pydocstyle = "6.1.1"
+pytest = "7.0.1"
+pytest-cov = "3.0.0"
+pytest-mock = "3.7.0"
+yapf = "0.32.0"
+toml = "0.10.2"
 
 [tool.poetry.scripts]
 ansible-doctor = "ansibledoctor.cli:main"
 
-[tool.poetry.group.dev.dependencies]
-ruff = "0.7.3"
-pytest = "8.3.3"
-pytest-mock = "3.14.0"
-pytest-cov = "6.0.0"
-toml = "0.10.2"
-j2lint = "1.1.0"
-
-
 [tool.poetry-dynamic-versioning]
 enable = true
 style = "semver"
 vcs = "git"
 
+[tool.isort]
+default_section = "THIRDPARTY"
+force_single_line = true
+line_length = 99
+sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
+skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
+
 [tool.pytest.ini_options]
-addopts = "ansibledoctor --cov=ansibledoctor --cov-report=xml:coverage.xml --cov-report=term --no-cov-on-fail"
+addopts = "ansibledoctor --cov=ansibledoctor --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
 filterwarnings = [
   "ignore::FutureWarning",
-  "ignore::DeprecationWarning",
+  "ignore:.*collections.*:DeprecationWarning",
   "ignore:.*pep8.*:FutureWarning",
 ]
 
@@ -75,70 +94,5 @@ filterwarnings = [
 omit = ["**/test/*"]
 
 [build-system]
-build-backend = "poetry_dynamic_versioning.backend"
+build-backend = "poetry.core.masonry.api"
 requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
-
-[tool.ruff]
-exclude = [
-  ".git",
-  "__pycache__",
-  "build",
-  "dist",
-  "test",
-  "*.pyc",
-  "*.egg-info",
-  ".cache",
-  ".eggs",
-  "env*",
-]
-
-line-length = 99
-indent-width = 4
-
-[tool.ruff.lint]
-# Explanation of errors
-#
-# D102: Missing docstring in public method
-# D103: Missing docstring in public function
-# D105: Missing docstring in magic method
-# D107: Missing docstring in __init__
-# D202: No blank lines allowed after function docstring
-# D203: One blank line required before class docstring
-# D212: Multi-line docstring summary should start at the first line
-ignore = [
-  "D102",
-  "D103",
-  "D105",
-  "D107",
-  "D202",
-  "D203",
-  "D212",
-  "UP038",
-  "RUF012",
-]
-select = [
-  "D",
-  "E",
-  "F",
-  "Q",
-  "W",
-  "I",
-  "S",
-  "BLE",
-  "N",
-  "UP",
-  "B",
-  "A",
-  "C4",
-  "T20",
-  "SIM",
-  "RET",
-  "ARG",
-  "ERA",
-  "RUF",
-]
-
-[tool.ruff.format]
-quote-style = "double"
-indent-style = "space"
-line-ending = "lf"
@@ -1,17 +1,4 @@
 {
   "$schema": "https://docs.renovatebot.com/renovate-schema.json",
-  "extends": ["github>thegeeklab/renovate-presets"],
-  "packageRules": [
-    {
-      "description": "Ansible base dependencies",
-      "matchPackageNames": ["ansible-core"],
-      "separateMinorPatch": true
-    },
-    {
-      "matchManagers": ["woodpecker"],
-      "matchFileNames": [".woodpecker/test.yml"],
-      "matchPackageNames": ["docker.io/library/python"],
-      "enabled": false
-    }
-  ]
+  "extends": ["github>thegeeklab/renovate-presets"]
 }
20
setup.cfg
Normal file
@@ -0,0 +1,20 @@
+[flake8]
+# Explanation of errors
+#
+# D102: Missing docstring in public method
+# D103: Missing docstring in public function
+# D105: Missing docstring in magic method
+# D107: Missing docstring in __init__
+# D202: No blank lines allowed after function docstring
+# W503:Line break occurred before a binary operator
+ignore = D102, D103, D105, D107, D202, W503
+max-line-length = 99
+inline-quotes = double
+exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
+
+[yapf]
+based_on_style = google
+column_limit = 99
+dedent_closing_brackets = true
+coalesce_brackets = true
+split_before_logical_operator = true