Compare commits


No commits in common. "main" and "v1.2.4" have entirely different histories.
main ... v1.2.4

73 changed files with 2610 additions and 2408 deletions

23 .chglog/CHANGELOG.tpl.md Executable file

@@ -0,0 +1,23 @@
# Changelog
{{ range .Versions -}}
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]({{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}){{ else }}{{ .Tag.Name }}{{ end }} ({{ datetime "2006-01-02" .Tag.Date }})
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ (regexReplaceAll "(.*)/issues/(.*)" (regexReplaceAll "(Co-\\w*-by.*)" .Subject "") "${1}/pull/${2}") | trim }}
{{ end }}
{{- end -}}
{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}

25 .chglog/config.yml Executable file

@@ -0,0 +1,25 @@
style: github
template: CHANGELOG.tpl.md
info:
title: CHANGELOG
repository_url: https://github.com/thegeeklab/ansible-doctor
options:
commit_groups:
title_maps:
feat: Features
fix: Bug Fixes
perf: Performance Improvements
refactor: Code Refactoring
chore: Others
test: Testing
ci: CI Pipeline
docs: Documentation
header:
pattern: "^(\\w*)(?:\\(([\\w\\$\\.\\-\\*\\s]*)\\))?\\:\\s(.*)$"
pattern_maps:
- Type
- Scope
- Subject
notes:
keywords:
- BREAKING CHANGE


@@ -8,5 +8,3 @@ SELinux
xoxys
ansible-.+
toc
GPL-3.0
(P|p)re-(C|c)ommit

493 .drone.jsonnet Normal file

@@ -0,0 +1,493 @@
local PythonVersion(pyversion='3.7') = {
name: 'python' + std.strReplace(pyversion, '.', '') + '-pytest',
image: 'python:' + pyversion,
environment: {
PY_COLORS: 1,
},
commands: [
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry version',
'poetry run ansible-doctor --help',
],
depends_on: [
'fetch',
],
};
local PipelineLint = {
kind: 'pipeline',
name: 'lint',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'yapf',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry run yapf -dr ./ansibledoctor',
],
},
{
name: 'flake8',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry run flake8 ./ansibledoctor',
],
},
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineTest = {
kind: 'pipeline',
name: 'test',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'fetch',
image: 'python:3.10',
commands: [
'git fetch -tq',
],
},
PythonVersion(pyversion='3.7'),
PythonVersion(pyversion='3.8'),
PythonVersion(pyversion='3.9'),
PythonVersion(pyversion='3.10'),
],
depends_on: [
'lint',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineSecurity = {
kind: 'pipeline',
name: 'security',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'bandit',
image: 'python:3.10',
environment: {
PY_COLORS: 1,
},
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry config experimental.new-installer false',
'poetry install',
'poetry run bandit -r ./ansibledoctor -x ./ansibledoctor/test',
],
},
],
depends_on: [
'test',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildPackage = {
kind: 'pipeline',
name: 'build-package',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
},
{
name: 'checksum',
image: 'alpine',
commands: [
'cd dist/ && sha256sum * > ../sha256sum.txt',
],
},
{
name: 'changelog-generate',
image: 'thegeeklab/git-chglog',
commands: [
'git fetch -tq',
'git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}',
],
},
{
name: 'changelog-format',
image: 'thegeeklab/alpine-tools',
commands: [
'prettier CHANGELOG.md',
'prettier -w CHANGELOG.md',
],
},
{
name: 'publish-github',
image: 'plugins/github-release',
settings: {
overwrite: true,
api_key: { from_secret: 'github_token' },
files: ['dist/*', 'sha256sum.txt'],
title: '${DRONE_TAG}',
note: 'CHANGELOG.md',
},
when: {
ref: ['refs/tags/**'],
},
},
{
name: 'publish-pypi',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry publish -n',
],
environment: {
POETRY_HTTP_BASIC_PYPI_USERNAME: { from_secret: 'pypi_username' },
POETRY_HTTP_BASIC_PYPI_PASSWORD: { from_secret: 'pypi_password' },
},
when: {
ref: ['refs/tags/**'],
},
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineBuildContainer(arch='amd64') = {
local build = if arch == 'arm' then [{
name: 'build',
image: 'python:3.10-alpine',
commands: [
'apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo',
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
environment: {
CARGO_NET_GIT_FETCH_WITH_CLI: true,
},
}] else [{
name: 'build',
image: 'python:3.10',
commands: [
'git fetch -tq',
'pip install poetry poetry-dynamic-versioning -qq',
'poetry build',
],
}],
kind: 'pipeline',
name: 'build-container-' + arch,
platform: {
os: 'linux',
arch: arch,
},
steps: build + [
{
name: 'dryrun',
image: 'thegeeklab/drone-docker:19',
settings: {
dry_run: true,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
depends_on: ['build'],
when: {
ref: ['refs/pull/**'],
},
},
{
name: 'publish-dockerhub',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
repo: 'thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
{
name: 'publish-quay',
image: 'thegeeklab/drone-docker:19',
settings: {
auto_tag: true,
auto_tag_suffix: arch,
dockerfile: 'docker/Dockerfile.' + arch,
registry: 'quay.io',
repo: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
depends_on: ['dryrun'],
},
],
depends_on: [
'security',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineDocs = {
kind: 'pipeline',
name: 'docs',
platform: {
os: 'linux',
arch: 'amd64',
},
concurrency: {
limit: 1,
},
steps: [
{
name: 'assets',
image: 'thegeeklab/alpine-tools',
commands: [
'make doc',
],
},
{
name: 'markdownlint',
image: 'thegeeklab/markdownlint-cli',
commands: [
"markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'",
],
},
{
name: 'spellcheck',
image: 'node:lts-alpine',
commands: [
'npm install -g spellchecker-cli',
"spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'testbuild',
image: 'thegeeklab/hugo:0.93.2',
commands: [
'hugo --panicOnWarning -s docs/ -b http://localhost/',
],
},
{
name: 'link-validation',
image: 'thegeeklab/link-validator',
commands: [
'link-validator -ro',
],
environment: {
LINK_VALIDATOR_BASE_DIR: 'docs/public',
},
},
{
name: 'build',
image: 'thegeeklab/hugo:0.93.2',
commands: [
'hugo --panicOnWarning -s docs/',
],
},
{
name: 'beautify',
image: 'node:lts-alpine',
commands: [
'npm install -g js-beautify',
"html-beautify -r -f 'docs/public/**/*.html'",
],
environment: {
FORCE_COLOR: true,
NPM_CONFIG_LOGLEVEL: 'error',
},
},
{
name: 'publish',
image: 'plugins/s3-sync',
settings: {
access_key: { from_secret: 's3_access_key' },
bucket: 'geekdocs',
delete: true,
endpoint: 'https://sp.rknet.org',
path_style: true,
secret_key: { from_secret: 's3_secret_access_key' },
source: 'docs/public/',
strip_prefix: 'docs/public/',
target: '/${DRONE_REPO_NAME}',
},
when: {
ref: ['refs/heads/main', 'refs/tags/**'],
},
},
],
depends_on: [
'build-package',
'build-container-amd64',
'build-container-arm64',
'build-container-arm',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**', 'refs/pull/**'],
},
};
local PipelineNotifications = {
kind: 'pipeline',
name: 'notifications',
platform: {
os: 'linux',
arch: 'amd64',
},
steps: [
{
image: 'plugins/manifest',
name: 'manifest-dockerhub',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'docker_username' },
password: { from_secret: 'docker_password' },
spec: 'docker/manifest.tmpl',
},
when: {
status: ['success'],
},
},
{
image: 'plugins/manifest',
name: 'manifest-quay',
settings: {
ignore_missing: true,
auto_tag: true,
username: { from_secret: 'quay_username' },
password: { from_secret: 'quay_password' },
spec: 'docker/manifest-quay.tmpl',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-dockerhub',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
DOCKER_PASS: {
from_secret: 'docker_password',
},
DOCKER_USER: {
from_secret: 'docker_username',
},
PUSHRM_FILE: 'README.md',
PUSHRM_SHORT: 'Annotation based documentation for your Ansible roles',
PUSHRM_TARGET: 'thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'pushrm-quay',
pull: 'always',
image: 'chko/docker-pushrm:1',
environment: {
APIKEY__QUAY_IO: {
from_secret: 'quay_token',
},
PUSHRM_FILE: 'README.md',
PUSHRM_TARGET: 'quay.io/thegeeklab/${DRONE_REPO_NAME}',
},
when: {
status: ['success'],
},
},
{
name: 'matrix',
image: 'thegeeklab/drone-matrix',
settings: {
homeserver: { from_secret: 'matrix_homeserver' },
roomid: { from_secret: 'matrix_roomid' },
template: 'Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}',
username: { from_secret: 'matrix_username' },
password: { from_secret: 'matrix_password' },
},
when: {
status: ['success', 'failure'],
},
},
],
depends_on: [
'docs',
],
trigger: {
ref: ['refs/heads/main', 'refs/tags/**'],
status: ['success', 'failure'],
},
};
[
PipelineLint,
PipelineTest,
PipelineSecurity,
PipelineBuildPackage,
PipelineBuildContainer(arch='amd64'),
PipelineBuildContainer(arch='arm64'),
PipelineBuildContainer(arch='arm'),
PipelineDocs,
PipelineNotifications,
]

635 .drone.yml Normal file

@@ -0,0 +1,635 @@
---
kind: pipeline
name: lint
platform:
os: linux
arch: amd64
steps:
- name: yapf
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry run yapf -dr ./ansibledoctor
environment:
PY_COLORS: 1
- name: flake8
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry run flake8 ./ansibledoctor
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
---
kind: pipeline
name: test
platform:
os: linux
arch: amd64
steps:
- name: fetch
image: python:3.10
commands:
- git fetch -tq
- name: python37-pytest
image: python:3.7
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry version
- poetry run ansible-doctor --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python38-pytest
image: python:3.8
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry version
- poetry run ansible-doctor --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python39-pytest
image: python:3.9
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry version
- poetry run ansible-doctor --help
environment:
PY_COLORS: 1
depends_on:
- fetch
- name: python310-pytest
image: python:3.10
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry version
- poetry run ansible-doctor --help
environment:
PY_COLORS: 1
depends_on:
- fetch
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- lint
---
kind: pipeline
name: security
platform:
os: linux
arch: amd64
steps:
- name: bandit
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry config experimental.new-installer false
- poetry install
- poetry run bandit -r ./ansibledoctor -x ./ansibledoctor/test
environment:
PY_COLORS: 1
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- test
---
kind: pipeline
name: build-package
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: alpine
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog-generate
image: thegeeklab/git-chglog
commands:
- git fetch -tq
- git-chglog --no-color --no-emoji -o CHANGELOG.md ${DRONE_TAG:---next-tag unreleased unreleased}
- name: changelog-format
image: thegeeklab/alpine-tools
commands:
- prettier CHANGELOG.md
- prettier -w CHANGELOG.md
- name: publish-github
image: plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${DRONE_TAG}
when:
ref:
- refs/tags/**
- name: publish-pypi
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
environment:
POETRY_HTTP_BASIC_PYPI_PASSWORD:
from_secret: pypi_password
POETRY_HTTP_BASIC_PYPI_USERNAME:
from_secret: pypi_username
when:
ref:
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-amd64
platform:
os: linux
arch: amd64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.amd64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: amd64
dockerfile: docker/Dockerfile.amd64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm64
platform:
os: linux
arch: arm64
steps:
- name: build
image: python:3.10
commands:
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm64
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm64
dockerfile: docker/Dockerfile.arm64
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: build-container-arm
platform:
os: linux
arch: arm
steps:
- name: build
image: python:3.10-alpine
commands:
- apk add -Uq --no-cache build-base openssl-dev libffi-dev musl-dev python3-dev git cargo
- git fetch -tq
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
environment:
CARGO_NET_GIT_FETCH_WITH_CLI: true
- name: dryrun
image: thegeeklab/drone-docker:19
settings:
dockerfile: docker/Dockerfile.arm
dry_run: true
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/pull/**
depends_on:
- build
- name: publish-dockerhub
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: docker_password
repo: thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: docker_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
- name: publish-quay
image: thegeeklab/drone-docker:19
settings:
auto_tag: true
auto_tag_suffix: arm
dockerfile: docker/Dockerfile.arm
password:
from_secret: quay_password
registry: quay.io
repo: quay.io/thegeeklab/${DRONE_REPO_NAME}
username:
from_secret: quay_username
when:
ref:
- refs/heads/main
- refs/tags/**
depends_on:
- dryrun
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- security
---
kind: pipeline
name: docs
platform:
os: linux
arch: amd64
concurrency:
limit: 1
steps:
- name: assets
image: thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: thegeeklab/markdownlint-cli
commands:
- markdownlint 'docs/content/**/*.md' 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: node:lts-alpine
commands:
- npm install -g spellchecker-cli
- spellchecker --files 'docs/content/**/*.md' 'README.md' -d .dictionary -p spell indefinite-article syntax-urls --no-suggestions
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: testbuild
image: thegeeklab/hugo:0.93.2
commands:
- hugo --panicOnWarning -s docs/ -b http://localhost/
- name: link-validation
image: thegeeklab/link-validator
commands:
- link-validator -ro
environment:
LINK_VALIDATOR_BASE_DIR: docs/public
- name: build
image: thegeeklab/hugo:0.93.2
commands:
- hugo --panicOnWarning -s docs/
- name: beautify
image: node:lts-alpine
commands:
- npm install -g js-beautify
- html-beautify -r -f 'docs/public/**/*.html'
environment:
FORCE_COLOR: true
NPM_CONFIG_LOGLEVEL: error
- name: publish
image: plugins/s3-sync
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint: https://sp.rknet.org
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${DRONE_REPO_NAME}
when:
ref:
- refs/heads/main
- refs/tags/**
trigger:
ref:
- refs/heads/main
- refs/tags/**
- refs/pull/**
depends_on:
- build-package
- build-container-amd64
- build-container-arm64
- build-container-arm
---
kind: pipeline
name: notifications
platform:
os: linux
arch: amd64
steps:
- name: manifest-dockerhub
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: docker_password
spec: docker/manifest.tmpl
username:
from_secret: docker_username
when:
status:
- success
- name: manifest-quay
image: plugins/manifest
settings:
auto_tag: true
ignore_missing: true
password:
from_secret: quay_password
spec: docker/manifest-quay.tmpl
username:
from_secret: quay_username
when:
status:
- success
- name: pushrm-dockerhub
pull: always
image: chko/docker-pushrm:1
environment:
DOCKER_PASS:
from_secret: docker_password
DOCKER_USER:
from_secret: docker_username
PUSHRM_FILE: README.md
PUSHRM_SHORT: Annotation based documentation for your Ansible roles
PUSHRM_TARGET: thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: pushrm-quay
pull: always
image: chko/docker-pushrm:1
environment:
APIKEY__QUAY_IO:
from_secret: quay_token
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/thegeeklab/${DRONE_REPO_NAME}
when:
status:
- success
- name: matrix
image: thegeeklab/drone-matrix
settings:
homeserver:
from_secret: matrix_homeserver
password:
from_secret: matrix_password
roomid:
from_secret: matrix_roomid
template: "Status: **{{ build.Status }}**<br/> Build: [{{ repo.Owner }}/{{ repo.Name }}]({{ build.Link }}){{#if build.Branch}} ({{ build.Branch }}){{/if}} by {{ commit.Author }}<br/> Message: {{ commit.Message.Title }}"
username:
from_secret: matrix_username
when:
status:
- success
- failure
trigger:
ref:
- refs/heads/main
- refs/tags/**
status:
- success
- failure
depends_on:
- docs
---
kind: signature
hmac: 87e424afec40d86266e7ebfb6bb9eaf77313cd2800bc810fae0ff86408641da0
...


@@ -52,11 +52,6 @@ branches:
required_status_checks:
strict: false
contexts:
- ci/woodpecker/pr/lint
- ci/woodpecker/pr/test
- ci/woodpecker/pr/build-package
- ci/woodpecker/pr/build-container
- ci/woodpecker/pr/docs
enforce_admins: false
required_linear_history: true
- continuous-integration/drone/pr
enforce_admins: null
restrictions: null

2 .gitignore vendored

@@ -106,8 +106,6 @@ pip-wheel-metadata
docs/themes/
docs/public/
resources/_gen/
.hugo_build.lock
# Misc
CHANGELOG.md
.ruff_cache


@@ -1,47 +0,0 @@
---
version: "1.1"
versioning:
update-major: []
update-minor: [feat]
update-patch: [fix, perf, refactor, chore, test, ci, docs]
tag:
pattern: "v%d.%d.%d"
release-notes:
sections:
- name: Features
commit-types: [feat]
section-type: commits
- name: Bug Fixes
commit-types: [fix]
section-type: commits
- name: Performance Improvements
commit-types: [perf]
section-type: commits
- name: Code Refactoring
commit-types: [refactor]
section-type: commits
- name: Others
commit-types: [chore]
section-type: commits
- name: Testing
commit-types: [test]
section-type: commits
- name: CI Pipeline
commit-types: [ci]
section-type: commits
- name: Documentation
commit-types: [docs]
section-type: commits
- name: Breaking Changes
section-type: breaking-changes
commit-message:
footer:
issue:
key: issue
add-value-prefix: "#"
issue:
regex: "#?[0-9]+"


@@ -1 +0,0 @@
https://hub.docker.com/r/thegeeklab/*


@@ -2,9 +2,5 @@
default: True
MD013: False
MD041: False
MD024: False
MD004:
style: dash
MD033:
allowed_elements:
- "br"


@@ -1,10 +0,0 @@
---
- id: ansible-doctor
name: ansible-doctor
description: Create annotation based documentation for your Ansible roles.
entry: ansible-doctor -f -qqq
language: python
pass_filenames: False
always_run: True
additional_dependencies:
- .[ansible-core]


@@ -1,2 +1,3 @@
.drone.yml
*.tpl.md
LICENSE


@@ -1,73 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: dryrun
image: quay.io/thegeeklab/wp-docker-buildx:4
settings:
containerfile: Containerfile.multiarch
dry_run: true
platforms:
- linux/amd64
- linux/arm64
provenance: false
repo: ${CI_REPO}
when:
- event: [pull_request]
- name: publish-dockerhub
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: docker_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
repo: ${CI_REPO}
username:
from_secret: docker_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
- name: publish-quay
image: quay.io/thegeeklab/wp-docker-buildx:4
group: container
settings:
auto_tag: true
containerfile: Containerfile.multiarch
password:
from_secret: quay_password
platforms:
- linux/amd64
- linux/arm64
provenance: false
registry: quay.io
repo: quay.io/${CI_REPO}
username:
from_secret: quay_username
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
depends_on:
- lint
- test


@@ -1,56 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: build
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry build
- name: checksum
image: quay.io/thegeeklab/alpine-tools
commands:
- cd dist/ && sha256sum * > ../sha256sum.txt
- name: changelog
image: quay.io/thegeeklab/git-sv
commands:
- git sv current-version
- git sv release-notes -t ${CI_COMMIT_TAG:-next} -o CHANGELOG.md
- cat CHANGELOG.md
- name: publish-github
image: docker.io/plugins/github-release
settings:
api_key:
from_secret: github_token
files:
- dist/*
- sha256sum.txt
note: CHANGELOG.md
overwrite: true
title: ${CI_COMMIT_TAG}
when:
- event: [tag]
- name: publish-pypi
image: docker.io/library/python:3.12
secrets:
- source: pypi_password
target: POETRY_HTTP_BASIC_PYPI_PASSWORD
- source: pypi_username
target: POETRY_HTTP_BASIC_PYPI_USERNAME
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry publish -n
when:
- event: [tag]
depends_on:
- lint
- test


@@ -1,100 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: assets
image: quay.io/thegeeklab/alpine-tools
commands:
- make doc
- name: markdownlint
image: quay.io/thegeeklab/markdownlint-cli
group: test
commands:
- markdownlint 'README.md' 'CONTRIBUTING.md'
- name: spellcheck
image: quay.io/thegeeklab/alpine-tools
group: test
commands:
- spellchecker --files 'docs/**/*.md' 'README.md' 'CONTRIBUTING.md' -d .dictionary -p spell indefinite-article syntax-urls
environment:
FORCE_COLOR: "true"
- name: link-validation
image: docker.io/lycheeverse/lychee
group: test
commands:
- lychee --no-progress --format detailed docs/content README.md
- name: build
image: quay.io/thegeeklab/hugo:0.125.7
commands:
- hugo --panicOnWarning -s docs/
- name: beautify
image: quay.io/thegeeklab/alpine-tools
commands:
- html-beautify -r -f 'docs/public/**/*.html'
environment:
FORCE_COLOR: "true"
- name: publish
image: quay.io/thegeeklab/wp-s3-action
settings:
access_key:
from_secret: s3_access_key
bucket: geekdocs
delete: true
endpoint:
from_secret: s3_endpoint
path_style: true
secret_key:
from_secret: s3_secret_access_key
source: docs/public/
strip_prefix: docs/public/
target: /${CI_REPO_NAME}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success, failure]
- name: pushrm-dockerhub
image: docker.io/chko/docker-pushrm:1
secrets:
- source: docker_password
target: DOCKER_PASS
- source: docker_username
target: DOCKER_USER
environment:
PUSHRM_FILE: README.md
PUSHRM_SHORT: Annotation based documentation for your Ansible roles
PUSHRM_TARGET: ${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
- name: pushrm-quay
image: docker.io/chko/docker-pushrm:1
secrets:
- source: quay_token
target: APIKEY__QUAY_IO
environment:
PUSHRM_FILE: README.md
PUSHRM_TARGET: quay.io/${CI_REPO}
when:
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
status: [success]
depends_on:
- build-package
- build-container


@@ -1,34 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
steps:
- name: check-format
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run ruff format --check --diff ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
- name: check-coding
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run ruff check ./${CI_REPO_NAME//-/}
environment:
PY_COLORS: "1"
- name: check-jinja
image: docker.io/library/python:3.12
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry run j2lint ansibledoctor/templates/ -i jinja-statements-indentation jinja-statements-delimiter
environment:
PY_COLORS: "1"


@@ -1,26 +0,0 @@
---
when:
- event: [tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
runs_on: [success, failure]
steps:
- name: matrix
image: quay.io/thegeeklab/wp-matrix
settings:
homeserver:
from_secret: matrix_homeserver
password:
from_secret: matrix_password
roomid:
from_secret: matrix_roomid
username:
from_secret: matrix_username
when:
- status: [success, failure]
depends_on:
- docs


@@ -1,34 +0,0 @@
---
when:
- event: [pull_request, tag]
- event: [push, manual]
branch:
- ${CI_REPO_DEFAULT_BRANCH}
variables:
- &pytest_base
group: pytest
commands:
- pip install poetry poetry-dynamic-versioning -qq
- poetry install -E ansible-core
- poetry version
- poetry run ${CI_REPO_NAME} --help
environment:
PY_COLORS: "1"
steps:
- name: python-312
image: docker.io/library/python:3.12
<<: *pytest_base
- name: python-311
image: docker.io/library/python:3.11
<<: *pytest_base
- name: python-310
image: docker.io/library/python:3.10
<<: *pytest_base
- name: python-39
image: docker.io/library/python:3.9
<<: *pytest_base


@@ -3,7 +3,7 @@
## Security
If you think you have found a **security issue**, please do not mention it in this repository.
Instead, send an email to `security@thegeeklab.de` with as many details as possible so it can be handled confidential.
Instead, send an email to security@thegeeklab.de with as many details as possible so it can be handled confidential.
## Bug Reports and Feature Requests


@@ -1,5 +1,5 @@
# renovate: datasource=github-releases depName=thegeeklab/hugo-geekdoc
THEME_VERSION := v0.45.0
THEME_VERSION := v0.27.5
THEME := hugo-geekdoc
BASEDIR := docs
THEMEDIR := $(BASEDIR)/themes


@@ -2,7 +2,7 @@
Annotation based documentation for your Ansible roles
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
[![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-doctor)
[![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-doctor)
[![Python Version](https://img.shields.io/pypi/pyversions/ansible-doctor.svg)](https://pypi.org/project/ansible-doctor/)
@@ -22,7 +22,7 @@ The full documentation is available at [https://ansible-doctor.geekdocs.de](http
## Contributors
Special thanks to all [contributors](https://github.com/thegeeklab/ansible-doctor/graphs/contributors). If you would like to contribute,
Special thanks goes to all [contributors](https://github.com/thegeeklab/ansible-doctor/graphs/contributors). If you would like to contribute,
please see the [instructions](https://github.com/thegeeklab/ansible-doctor/blob/main/CONTRIBUTING.md).
## License


@@ -1,10 +1,3 @@
"""Provide version information."""
"""Default package."""
__version__ = "0.0.0"
import sys
try:
import ansible # noqa
except ImportError:
sys.exit("ERROR: Python requirements are missing: 'ansible-core' not found.")


@@ -8,7 +8,7 @@ from collections import defaultdict
import anyconfig
from ansibledoctor.config import SingleConfig
from ansibledoctor.utils import SingleLog, _split_string
from ansibledoctor.utils import SingleLog
class AnnotationItem:
@@ -20,11 +20,9 @@ class AnnotationItem:
def __str__(self):
"""Beautify object string output."""
for key in self.data:
for key in self.data.keys():
for sub in self.data.get(key):
return f"AnnotationItem({key}: {sub})"
return "None"
return "AnnotationItem({}: {})".format(key, sub)
def get_obj(self):
return self.data
@@ -43,7 +41,7 @@ class Annotation:
self._all_annotations = self.config.get_annotations_definition()
if name in self._all_annotations:
if name in self._all_annotations.keys():
self._annotation_definition = self._all_annotations[name]
if self._annotation_definition is not None:
@@ -55,23 +53,26 @@
def _find_annotation(self):
regex = r"(\#\ *\@" + self._annotation_definition["name"] + r"\ +.*)"
for rfile in self._files_registry.get_files():
with open(rfile, encoding="utf8") as self._file_handler:
num = 1
while True:
line = self._file_handler.readline()
if not line:
break
self._file_handler = open(rfile, encoding="utf8")
if re.match(regex, line.strip()):
item = self._get_annotation_data(
num, line, self._annotation_definition["name"], rfile
num = 1
while True:
line = self._file_handler.readline()
if not line:
break
if re.match(regex, line.strip()):
item = self._get_annotation_data(
num, line, self._annotation_definition["name"], rfile
)
if item:
self.logger.info(str(item))
self._populate_item(
item.get_obj().items(), self._annotation_definition["name"]
)
if item:
self.logger.info(str(item))
self._populate_item(
item.get_obj().items(), self._annotation_definition["name"]
)
num += 1
num += 1
self._file_handler.close()
def _populate_item(self, item, name):
allow_multiple = self.config.ANNOTATIONS.get(name)["allow_multiple"]
@@ -85,7 +86,9 @@ class Annotation:
try:
anyconfig.merge(self._all_items[key], value, ac_merge=anyconfig.MS_DICTS)
except ValueError as e:
self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}")
self.log.sysexit_with_message(
"Unable to merge annotation values:\n{}".format(e)
)
def _get_annotation_data(self, num, line, name, rfile):
"""
@@ -100,20 +103,20 @@
line1 = re.sub(reg1, "", line).strip()
# step3 take the main key value from the annotation
parts = [part.strip() for part in _split_string(line1, ":", "\\", 2)]
parts = [part.strip() for part in line1.split(":", 2)]
key = str(parts[0])
item.data[key] = {}
multiline_char = [">", "$>"]
if len(parts) < 2:
return None
return
if len(parts) == 2:
parts = parts[:1] + ["value"] + parts[1:]
subtypes = self.config.ANNOTATIONS.get(name)["subtypes"]
if subtypes and parts[1] not in subtypes:
return None
return
content = [parts[2]]
@@ -159,7 +162,8 @@ class Annotation:
if len(test_line) == 0:
before = "\n\n"
continue
before = ""
else:
before = ""
if test_line.endswith("\\"):
final = final.rstrip("\\").strip()
@@ -181,5 +185,7 @@ class Annotation:
return {key: json.loads(string)}
except ValueError:
self.log.sysexit_with_message(
f"Json value error: Can't parse json in {rfile}:{num!s}:\n{line.strip()}"
"Json value error: Can't parse json in {}:{}:\n{}".format(
rfile, str(num), line.strip()
)
)


@@ -2,7 +2,6 @@
"""Entrypoint and CLI handler."""
import argparse
import os
import ansibledoctor.exception
from ansibledoctor import __version__
@@ -13,14 +12,17 @@ from ansibledoctor.utils import SingleLog
class AnsibleDoctor:
"""Create main object."""
"""Main doctor object."""
def __init__(self):
self.log = SingleLog()
self.logger = self.log.logger
self.args = self._cli_args()
self.config = self._get_config()
self._execute()
doc_parser = Parser()
doc_generator = Generator(doc_parser)
doc_generator.render()
def _cli_args(self):
"""
@@ -33,21 +35,13 @@
description="Generate documentation from annotated Ansible roles using templates"
)
parser.add_argument(
"base_dir", nargs="?", help="base directory (default: current working directory)"
"role_dir", nargs="?", help="role directory (default: current working dir)"
)
parser.add_argument(
"-c", "--config", dest="config_file", help="path to configuration file"
"-c", "--config", dest="config_file", help="location of configuration file"
)
parser.add_argument(
"-o", "--output", dest="output_dir", action="store", help="output directory"
)
parser.add_argument(
"-r",
"--recursive",
dest="recursive",
action="store_true",
default=None,
help="run recursively over the base directory subfolders",
"-o", "--output", dest="output_dir", action="store", help="output base dir"
)
parser.add_argument(
"-f",
@@ -55,7 +49,7 @@
dest="force_overwrite",
action="store_true",
default=None,
help="force overwrite output file",
help="force overwrite output file"
)
parser.add_argument(
"-d",
@@ -63,7 +57,7 @@
dest="dry_run",
action="store_true",
default=None,
help="dry run without writing",
help="dry run without writing"
)
parser.add_argument(
"-n",
@@ -71,7 +65,7 @@
dest="role_detection",
action="store_false",
default=None,
help="disable automatic role detection",
help="disable automatic role detection"
)
parser.add_argument(
"-v", dest="logging.level", action="append_const", const=-1, help="increase log level"
@@ -79,7 +73,9 @@
parser.add_argument(
"-q", dest="logging.level", action="append_const", const=1, help="decrease log level"
)
parser.add_argument("--version", action="version", version=f"%(prog)s {__version__}")
parser.add_argument(
"--version", action="version", version="%(prog)s {}".format(__version__)
)
return parser.parse_args().__dict__
@@ -89,38 +85,22 @@
except ansibledoctor.exception.ConfigError as e:
self.log.sysexit_with_message(e)
return config
try:
self.log.set_level(config.config["logging"]["level"])
except ValueError as e:
self.log.sysexit_with_message("Can not set log level.\n{}".format(str(e)))
def _execute(self):
cwd = self.config.base_dir
walkdirs = [cwd]
if self.config.recursive:
walkdirs = [f.path for f in os.scandir(cwd) if f.is_dir()]
for item in walkdirs:
os.chdir(item)
self.config.set_config(base_dir=os.getcwd())
try:
self.log.set_level(self.config.config["logging"]["level"])
except ValueError as e:
self.log.sysexit_with_message(f"Can not set log level.\n{e!s}")
self.logger.info(f"Using config file: {self.config.config_file}")
self.logger.debug(f"Using working dir: {item}")
if self.config.config["role_detection"]:
if self.config.is_role:
self.logger.info(f"Ansible role detected: {self.config.config['role_name']}")
else:
self.log.sysexit_with_message("No Ansible role detected")
if config.config["role_detection"]:
if config.is_role:
self.logger.info("Ansible role detected")
else:
self.logger.info("Ansible role detection disabled")
self.log.sysexit_with_message("No Ansible role detected")
else:
self.logger.info("Ansible role detection disabled")
doc_parser = Parser()
doc_generator = Generator(doc_parser)
doc_generator.render()
self.logger.info("Using config file {}".format(config.config_file))
return config
def main():


@@ -15,10 +15,9 @@ from ansibledoctor.utils import Singleton
config_dir = AppDirs("ansible-doctor").user_config_dir
default_config_file = os.path.join(config_dir, "config.yml")
default_envs_prefix = "ANSIBLE_DOCTOR_"
class Config:
class Config():
"""
Create an object with all necessary settings.
@@ -30,104 +29,79 @@
SETTINGS = {
"config_file": {
"default": default_config_file,
"default": "",
"env": "CONFIG_FILE",
"type": environs.Env().str,
"type": environs.Env().str
},
"base_dir": {
"default": os.getcwd(),
"refresh": os.getcwd,
"env": "BASE_DIR",
"type": environs.Env().str,
"role_dir": {
"default": "",
"env": "ROLE_DIR",
"type": environs.Env().str
},
"role_name": {
"default": "",
"env": "ROLE_NAME",
"type": environs.Env().str,
"type": environs.Env().str
},
"dry_run": {
"default": False,
"env": "DRY_RUN",
"file": True,
"type": environs.Env().bool,
"type": environs.Env().bool
},
"logging.level": {
"default": "WARNING",
"env": "LOG_LEVEL",
"file": True,
"type": environs.Env().str,
"type": environs.Env().str
},
"logging.json": {
"default": False,
"env": "LOG_JSON",
"file": True,
"type": environs.Env().bool,
"type": environs.Env().bool
},
"output_dir": {
"default": os.getcwd(),
"refresh": os.getcwd,
"env": "OUTPUT_DIR",
"file": True,
"type": environs.Env().str,
},
"recursive": {
"default": False,
"env": "RECURSIVE",
"type": environs.Env().bool,
"type": environs.Env().str
},
"template_dir": {
"default": os.path.join(os.path.dirname(os.path.realpath(__file__)), "templates"),
"env": "TEMPLATE_DIR",
"file": True,
"type": environs.Env().str,
"type": environs.Env().str
},
"template": {
"default": "readme",
"env": "TEMPLATE",
"file": True,
"type": environs.Env().str,
},
"template_autotrim": {
"default": True,
"env": "TEMPLATE_AUTOTRIM",
"file": True,
"type": environs.Env().bool,
"type": environs.Env().str
},
"force_overwrite": {
"default": False,
"env": "FORCE_OVERWRITE",
"file": True,
"type": environs.Env().bool,
"type": environs.Env().bool
},
"custom_header": {
"default": "",
"env": "CUSTOM_HEADER",
"file": True,
"type": environs.Env().str,
"type": environs.Env().str
},
"exclude_files": {
"default": [],
"env": "EXCLUDE_FILES",
"file": True,
"type": environs.Env().list,
},
"exclude_tags": {
"default": [],
"env": "EXCLUDE_TAGS",
"file": True,
"type": environs.Env().list,
"type": environs.Env().list
},
"role_detection": {
"default": True,
"env": "ROLE_DETECTION",
"file": True,
"type": environs.Env().bool,
},
"tabulate_variables": {
"default": False,
"env": "TABULATE_VARIABLES",
"file": True,
"type": environs.Env().bool,
"type": environs.Env().bool
},
}
@@ -136,35 +110,35 @@
"name": "meta",
"automatic": True,
"subtypes": ["value"],
"allow_multiple": False,
"allow_multiple": False
},
"todo": {
"name": "todo",
"automatic": True,
"subtypes": ["value"],
"allow_multiple": True,
"allow_multiple": True
},
"var": {
"name": "var",
"automatic": True,
"subtypes": ["value", "example", "description", "type", "deprecated"],
"allow_multiple": False,
"subtypes": ["value", "example", "description"],
"allow_multiple": False
},
"example": {
"name": "example",
"automatic": True,
"subtypes": [],
"allow_multiple": False,
"allow_multiple": False
},
"tag": {
"name": "tag",
"automatic": True,
"subtypes": ["value", "description"],
"allow_multiple": False,
"allow_multiple": False
},
}
def __init__(self, args=None):
def __init__(self, args={}):
"""
Initialize a new settings class.
@@ -173,14 +147,13 @@
:returns: None
"""
if args is None:
self._args = {}
else:
self._args = args
self._args = args
self._schema = None
self.config_file = default_config_file
self.role_dir = os.getcwd()
self.config = None
self.is_role = False
self.set_config()
self._set_config()
self.is_role = self._set_is_role() or False
def _get_args(self, args):
cleaned = dict(filter(lambda item: item[1] is not None, args.items()))
@@ -202,10 +175,11 @@
def _get_defaults(self):
normalized = {}
for key, item in self.SETTINGS.items():
if item.get("refresh"):
item["default"] = item["refresh"]()
normalized = self._add_dict_branch(normalized, key.split("."), item["default"])
# compute role_name default
normalized["role_name"] = os.path.basename(self.role_dir)
self.schema = anyconfig.gen_schema(normalized)
return normalized
@@ -213,85 +187,61 @@
normalized = {}
for key, item in self.SETTINGS.items():
if item.get("env"):
envname = f"{default_envs_prefix}{item['env']}"
prefix = "ANSIBLE_DOCTOR_"
envname = prefix + item["env"]
try:
value = item["type"](envname)
normalized = self._add_dict_branch(normalized, key.split("."), value)
except environs.EnvError as e:
if f'"{envname}" not set' in str(e):
if '"{}" not set'.format(envname) in str(e):
pass
else:
raise ansibledoctor.exception.ConfigError(
"Unable to read environment variable", str(e)
) from e
)
return normalized
def set_config(self, base_dir=None):
def _set_config(self):
args = self._get_args(self._args)
envs = self._get_envs()
defaults = self._get_defaults()
self.recursive = defaults.get("recursive")
if envs.get("recursive"):
self.recursive = envs.get("recursive")
if args.get("recursive"):
self.recursive = args.get("recursive")
if "recursive" in defaults:
defaults.pop("recursive")
self.config_file = defaults.get("config_file")
# preset config file path
if envs.get("config_file"):
self.config_file = self._normalize_path(envs.get("config_file"))
if envs.get("role_dir"):
self.role_dir = self._normalize_path(envs.get("role_dir"))
if args.get("config_file"):
self.config_file = self._normalize_path(args.get("config_file"))
if "config_file" in defaults:
defaults.pop("config_file")
self.base_dir = defaults.get("base_dir")
if envs.get("base_dir"):
self.base_dir = self._normalize_path(envs.get("base_dir"))
if args.get("base_dir"):
self.base_dir = self._normalize_path(args.get("base_dir"))
if base_dir:
self.base_dir = base_dir
if "base_dir" in defaults:
defaults.pop("base_dir")
self.is_role = os.path.isdir(os.path.join(self.base_dir, "tasks"))
# compute role_name default
defaults["role_name"] = os.path.basename(self.base_dir)
if args.get("role_dir"):
self.role_dir = self._normalize_path(args.get("role_dir"))
source_files = []
source_files.append((self.config_file, False))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor"), True))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor.yml"), True))
source_files.append((os.path.join(os.getcwd(), ".ansibledoctor.yaml"), True))
source_files.append(self.config_file)
source_files.append(os.path.join(os.getcwd(), ".ansibledoctor"))
source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yml"))
source_files.append(os.path.join(os.getcwd(), ".ansibledoctor.yaml"))
for config, first_found in source_files:
for config in source_files:
if config and os.path.exists(config):
with open(config, encoding="utf8") as stream:
with open(config, "r", encoding="utf8") as stream:
s = stream.read()
try:
file_dict = ruamel.yaml.YAML(typ="safe", pure=True).load(s)
file_dict = ruamel.yaml.safe_load(s)
except (
ruamel.yaml.composer.ComposerError,
ruamel.yaml.scanner.ScannerError,
ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
) as e:
message = f"{e.context} {e.problem}"
message = "{} {}".format(e.context, e.problem)
raise ansibledoctor.exception.ConfigError(
f"Unable to read config file: {config}", message
) from e
"Unable to read config file {}".format(config), message
)
if self._validate(file_dict):
anyconfig.merge(defaults, file_dict, ac_merge=anyconfig.MS_DICTS)
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
self.config_file = config
if first_found:
break
if self._validate(envs):
anyconfig.merge(defaults, envs, ac_merge=anyconfig.MS_DICTS)
@@ -299,9 +249,14 @@
anyconfig.merge(defaults, args, ac_merge=anyconfig.MS_DICTS)
fix_files = ["output_dir", "template_dir", "custom_header"]
for filename in fix_files:
if defaults[filename] and defaults[filename] != "":
defaults[filename] = self._normalize_path(defaults[filename])
for file in fix_files:
if defaults[file] and defaults[file] != "":
defaults[file] = self._normalize_path(defaults[file])
if "config_file" in defaults:
defaults.pop("config_file")
if "role_dir" in defaults:
defaults.pop("role_dir")
defaults["logging"]["level"] = defaults["logging"]["level"].upper()
@@ -311,33 +266,38 @@
if not os.path.isabs(path):
base = os.path.join(os.getcwd(), path)
return os.path.abspath(os.path.expanduser(os.path.expandvars(base)))
else:
return path
return path
def _set_is_role(self):
if os.path.isdir(os.path.join(self.role_dir, "tasks")):
return True
def _validate(self, config):
try:
anyconfig.validate(config, self.schema, ac_schema_safe=False)
except jsonschema.exceptions.ValidationError as e:
schema = format_as_index(list(e.relative_schema_path)[:-1])
schema_error = f"Failed validating '{e.validator}' in schema {schema}\n{e.message}"
raise ansibledoctor.exception.ConfigError("Configuration error", schema_error) from e
schema_error = "Failed validating '{validator}' in schema{schema}\n{message}".format(
validator=e.validator,
schema=format_as_index(list(e.relative_schema_path)[:-1]),
message=e.message
)
raise ansibledoctor.exception.ConfigError("Configuration error", schema_error)
return True
def _add_dict_branch(self, tree, vector, value):
key = vector[0]
tree[key] = (
value
if len(vector) == 1
else self._add_dict_branch(tree.get(key, {}), vector[1:], value)
)
tree[key] = value \
if len(vector) == 1 \
else self._add_dict_branch(tree[key] if key in tree else {}, vector[1:], value)
return tree
def get_annotations_definition(self, automatic=True):
annotations = {}
if automatic:
for k, item in self.ANNOTATIONS.items():
if item.get("automatic"):
if "automatic" in item.keys() and item["automatic"]:
annotations[k] = item
return annotations
@@ -345,7 +305,7 @@
annotations = []
if automatic:
for k, item in self.ANNOTATIONS.items():
if item.get("automatic"):
if "automatic" in item.keys() and item["automatic"]:
annotations.append(k)
return annotations


@@ -9,12 +9,14 @@ from functools import reduce
import jinja2.exceptions
import ruamel.yaml
from jinja2 import Environment, FileSystemLoader
from jinja2 import Environment
from jinja2 import FileSystemLoader
from jinja2.filters import pass_eval_context
import ansibledoctor.exception
from ansibledoctor.config import SingleConfig
from ansibledoctor.utils import FileUtils, SingleLog
from ansibledoctor.utils import FileUtils
from ansibledoctor.utils import SingleLog
class Generator:
@@ -38,32 +40,33 @@
"""
template_dir = self.config.get_template()
if os.path.isdir(template_dir):
self.logger.info(f"Using template dir: {template_dir}")
self.logger.info("Using template dir: {}".format(template_dir))
else:
self.log.sysexit_with_message(f"Can not open template dir {template_dir}")
self.log.sysexit_with_message("Can not open template dir {}".format(template_dir))
for file in glob.iglob(template_dir + "/**/*." + self.extension, recursive=True):
relative_file = file[len(template_dir) + 1 :]
relative_file = file[len(template_dir) + 1:]
if ntpath.basename(file)[:1] != "_":
self.logger.debug(f"Found template file: {relative_file}")
self.logger.debug("Found template file: " + relative_file)
self.template_files.append(relative_file)
else:
self.logger.debug(f"Ignoring template file: {relative_file}")
self.logger.debug("Ignoring template file: " + relative_file)
def _create_dir(self, directory):
if not self.config.config["dry_run"] and not os.path.isdir(directory):
try:
os.makedirs(directory, exist_ok=True)
self.logger.info(f"Creating dir: {directory}")
self.logger.info("Creating dir: " + directory)
except FileExistsError as e:
self.log.sysexit_with_message(e)
self.log.sysexit_with_message(str(e))
def _write_doc(self):
files_to_overwite = []
for file in self.template_files:
doc_file = os.path.join(
self.config.config.get("output_dir"), os.path.splitext(file)[0]
self.config.config.get("output_dir"),
os.path.splitext(file)[0]
)
if os.path.isfile(doc_file):
files_to_overwite.append(doc_file)
@@ -74,80 +77,70 @@
if bool(header_file):
role_data["internal"]["append"] = True
try:
with open(header_file) as a:
with open(header_file, "r") as a:
header_content = a.read()
except FileNotFoundError as e:
self.log.sysexit_with_message(f"Can not open custom header file\n{e!s}")
self.log.sysexit_with_message("Can not open custom header file\n{}".format(str(e)))
if (
len(files_to_overwite) > 0
and self.config.config.get("force_overwrite") is False
and not self.config.config["dry_run"]
):
files_to_overwite_string = "\n".join(files_to_overwite)
prompt = f"These files will be overwritten:\n{files_to_overwite_string}".replace(
"\n", "\n... "
)
if len(files_to_overwite) > 0 and self.config.config.get("force_overwrite") is False:
if not self.config.config["dry_run"]:
self.logger.warn("This files will be overwritten:")
print(*files_to_overwite, sep="\n")
try:
if not FileUtils.query_yes_no(f"{prompt}\nDo you want to continue?"):
try:
if not FileUtils.query_yes_no("Do you want to continue?"):
self.log.sysexit_with_message("Aborted...")
except ansibledoctor.exception.InputError as e:
self.logger.debug(str(e))
self.log.sysexit_with_message("Aborted...")
except ansibledoctor.exception.InputError as e:
self.logger.debug(str(e))
self.log.sysexit_with_message("Aborted...")
for file in self.template_files:
doc_file = os.path.join(
self.config.config.get("output_dir"), os.path.splitext(file)[0]
self.config.config.get("output_dir"),
os.path.splitext(file)[0]
)
source_file = self.config.get_template() + "/" + file
self.logger.debug(f"Writing doc output to: {doc_file} from: {source_file}")
self.logger.debug("Writing doc output to: " + doc_file + " from: " + source_file)
# make sure the directory exists
self._create_dir(os.path.dirname(doc_file))
if os.path.exists(source_file) and os.path.isfile(source_file):
with open(source_file) as template:
with open(source_file, "r") as template:
data = template.read()
if data is not None:
try:
jenv = Environment( # nosec
loader=FileSystemLoader(self.config.get_template()),
lstrip_blocks=True,
trim_blocks=True,
autoescape=jinja2.select_autoescape(),
trim_blocks=True
)
jenv.filters["to_nice_yaml"] = self._to_nice_yaml
jenv.filters["deep_get"] = self._deep_get
jenv.filters["safe_join"] = self._safe_join
# keep the old name of the function to not break custom templates.
jenv.filters["save_join"] = self._safe_join
tabulate_vars = self.config.config.get("tabulate_variables")
data = jenv.from_string(data).render(
role_data, role=role_data, tabulate_vars=tabulate_vars
)
jenv.filters["save_join"] = self._save_join
data = jenv.from_string(data).render(role_data, role=role_data)
if not self.config.config["dry_run"]:
with open(doc_file, "wb") as outfile:
outfile.write(header_content.encode("utf-8"))
outfile.write(data.encode("utf-8"))
self.logger.info(f"Writing to: {doc_file}")
self.logger.info("Writing to: " + doc_file)
else:
self.logger.info(f"Writing to: {doc_file}")
self.logger.info("Writing to: " + doc_file)
except (
jinja2.exceptions.UndefinedError,
jinja2.exceptions.TemplateSyntaxError,
jinja2.exceptions.TemplateRuntimeError,
jinja2.exceptions.UndefinedError, jinja2.exceptions.TemplateSyntaxError
) as e:
self.log.sysexit_with_message(
f"Jinja2 templating error while loading file: '{file}'\n{e!s}"
"Jinja2 templating error while loading file: '{}'\n{}".format(
file, str(e)
)
)
except UnicodeEncodeError as e:
self.log.sysexit_with_message(
f"Unable to print special characters\n{e!s}"
"Unable to print special characters\n{}".format(str(e))
)
def _to_nice_yaml(self, a, indent=4, **kw):
def _to_nice_yaml(self, a, indent=4, *args, **kw):
"""Make verbose, human readable yaml."""
yaml = ruamel.yaml.YAML()
yaml.indent(mapping=indent, sequence=(indent * 2), offset=indent)
@@ -155,27 +148,24 @@
yaml.dump(a, stream, **kw)
return stream.getvalue().rstrip()
def _deep_get(self, _, dictionary, keys):
def _deep_get(self, _, dictionary, keys, *args, **kw):
default = None
return reduce(
lambda d, key: d.get(key, default) if isinstance(d, dict) else default,
keys.split("."),
dictionary,
lambda d, key: d.get(key, default)
if isinstance(d, dict) else default, keys.split("."), dictionary
)
@pass_eval_context
def _safe_join(self, eval_ctx, value, d=""):
def _save_join(self, eval_ctx, value, d=u"", attribute=None):
if isinstance(value, str):
value = [value]
normalized = jinja2.filters.do_join(eval_ctx, value, d, attribute=None)
for s in [r" +(\n|\t| )", r"(\n|\t) +"]:
normalized = re.sub(s, "\\1", normalized)
if self.config.config["template_autotrim"]:
for s in [r" +(\n|\t| )", r"(\n|\t) +"]:
normalized = re.sub(s, "\\1", normalized)
return jinja2.filters.do_mark_safe(normalized)
return normalized
def render(self):
self.logger.info(f"Using output dir: {self.config.config.get('output_dir')}")
self.logger.info("Using output dir: " + self.config.config.get("output_dir"))
self._write_doc()


@@ -5,14 +5,16 @@ import fnmatch
from collections import defaultdict
import anyconfig
import ruamel.yaml
from nested_lookup import nested_lookup
from ansibledoctor.annotation import Annotation
from ansibledoctor.config import SingleConfig
from ansibledoctor.contstants import YAML_EXTENSIONS
from ansibledoctor.exception import YAMLError
from ansibledoctor.file_registry import Registry
from ansibledoctor.utils import SingleLog, flatten
from ansibledoctor.utils.yamlhelper import parse_yaml, parse_yaml_ansible
from ansibledoctor.utils import SingleLog
from ansibledoctor.utils import UnsafeTag
from ansibledoctor.utils import flatten
class Parser:
@@ -30,63 +32,101 @@
self._parse_task_tags()
self._populate_doc_data()
def _yaml_remove_comments(self, d):
if isinstance(d, dict):
for k, v in d.items():
self._yaml_remove_comments(k)
self._yaml_remove_comments(v)
elif isinstance(d, list):
for elem in d:
self._yaml_remove_comments(elem)
try:
attr = "comment" if isinstance(
d, ruamel.yaml.scalarstring.ScalarString
) else ruamel.yaml.comments.Comment.attrib
delattr(d, attr)
except AttributeError:
pass
def _parse_var_files(self):
for rfile in self._files_registry.get_files():
if any(fnmatch.fnmatch(rfile, "*/defaults/*." + ext) for ext in YAML_EXTENSIONS):
with open(rfile, encoding="utf8") as yamlfile:
with open(rfile, "r", encoding="utf8") as yaml_file:
try:
raw = parse_yaml(yamlfile)
except YAMLError as e:
self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
ruamel.yaml.add_constructor(
UnsafeTag.yaml_tag,
UnsafeTag.yaml_constructor,
constructor=ruamel.yaml.SafeConstructor
)
data = defaultdict(dict, raw or {})
raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
self._yaml_remove_comments(raw)
for key, value in data.items():
self._data["var"][key] = {"value": {key: value}}
data = defaultdict(dict, raw or {})
for key, value in data.items():
self._data["var"][key] = {"value": {key: value}}
except (
ruamel.yaml.composer.ComposerError,
ruamel.yaml.scanner.ScannerError,
ruamel.yaml.constructor.ConstructorError,
ruamel.yaml.constructor.DuplicateKeyError,
) as e:
message = "{} {}".format(e.context, e.problem)
self.log.sysexit_with_message(
"Unable to read yaml file {}\n{}".format(rfile, message)
)
def _parse_meta_file(self):
self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
for rfile in self._files_registry.get_files():
if any("meta/main." + ext in rfile for ext in YAML_EXTENSIONS):
with open(rfile, encoding="utf8") as yamlfile:
with open(rfile, "r", encoding="utf8") as yaml_file:
try:
raw = parse_yaml(yamlfile)
except YAMLError as e:
self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
self._yaml_remove_comments(raw)
data = defaultdict(dict, raw)
if data.get("galaxy_info"):
for key, value in data.get("galaxy_info").items():
self._data["meta"][key] = {"value": value}
data = defaultdict(dict, raw)
if data.get("galaxy_info"):
for key, value in data.get("galaxy_info").items():
self._data["meta"][key] = {"value": value}
if data.get("dependencies") is not None:
self._data["meta"]["dependencies"] = {"value": data.get("dependencies")}
if data.get("dependencies") is not None:
self._data["meta"]["dependencies"] = {
"value": data.get("dependencies")
}
self._data["meta"]["name"] = {"value": self.config.config["role_name"]}
except (
ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
) as e:
message = "{} {}".format(e.context, e.problem)
self.log.sysexit_with_message(
"Unable to read yaml file {}\n{}".format(rfile, message)
)
def _parse_task_tags(self):
for rfile in self._files_registry.get_files():
if any(fnmatch.fnmatch(rfile, "*/tasks/*." + ext) for ext in YAML_EXTENSIONS):
with open(rfile, encoding="utf8") as yamlfile:
with open(rfile, "r", encoding="utf8") as yaml_file:
try:
raw = parse_yaml_ansible(yamlfile)
except YAMLError as e:
self.log.sysexit_with_message(f"Unable to read yaml file {rfile}\n{e}")
raw = ruamel.yaml.YAML(typ="rt").load(yaml_file)
self._yaml_remove_comments(raw)
tags = [
task.get("tags")
for task in raw
if task.get("tags")
and task.get("tags") not in self.config.config["exclude_tags"]
]
for tag in flatten(tags):
self._data["tag"][tag] = {"value": tag}
tags = list(set(flatten(nested_lookup("tags", raw))))
for tag in tags:
self._data["tag"][tag] = {"value": tag}
except (
ruamel.yaml.composer.ComposerError, ruamel.yaml.scanner.ScannerError
) as e:
message = "{} {}".format(e.context, e.problem)
self.log.sysexit_with_message(
"Unable to read yaml file {}\n{}".format(rfile, message)
)
def _populate_doc_data(self):
"""Generate the documentation data object."""
tags = defaultdict(dict)
for annotation in self.config.get_annotations_names(automatic=True):
self.logger.info(f"Finding annotations for: @{annotation}")
self.logger.info("Finding annotations for: @" + annotation)
self._annotation_objs[annotation] = Annotation(
name=annotation, files_registry=self._files_registry
)
@ -95,7 +135,7 @@ class Parser:
try:
anyconfig.merge(self._data, tags, ac_merge=anyconfig.MS_DICTS)
except ValueError as e:
self.log.sysexit_with_message(f"Unable to merge annotation values:\n{e}")
self.log.sysexit_with_message("Unable to merge annotation values:\n{}".format(e))
def get_data(self):
return self._data

View File

@ -1,21 +1,15 @@
#!/usr/bin/env python3
"""Doctor exception module."""
"""Custom exceptions."""
class DoctorError(Exception):
"""Define generic exception."""
"""Generic exception class for ansible-doctor."""
def __init__(self, msg, original_exception=""):
super().__init__(f"{msg}\n{original_exception}")
super(DoctorError, self).__init__("{msg}\n{org}".format(msg=msg, org=original_exception))
self.original_exception = original_exception
class YAMLError(DoctorError):
"""Errors while reading a yaml file."""
pass
class ConfigError(DoctorError):
"""Errors related to config file handling."""

View File

@ -35,20 +35,24 @@ class Registry:
:return: None
"""
extensions = YAML_EXTENSIONS
base_dir = self.config.base_dir
role_name = os.path.basename(base_dir)
role_dir = self.config.role_dir
role_name = os.path.basename(role_dir)
excludes = self.config.config.get("exclude_files")
excludespec = pathspec.PathSpec.from_lines("gitwildmatch", excludes)
self.log.debug(f"Scan for files: {base_dir}")
self.log.debug("Scan for files: " + role_dir)
for extension in extensions:
pattern = os.path.join(base_dir, "**/*." + extension)
pattern = os.path.join(role_dir, "**/*." + extension)
for filename in glob.iglob(pattern, recursive=True):
if not excludespec.match_file(filename):
self.log.debug(
f"Adding file to '{role_name}': {os.path.relpath(filename, base_dir)}"
"Adding file to '{}': {}".format(
role_name, os.path.relpath(filename, role_dir)
)
)
self._doc.append(filename)
else:
self.log.debug(f"Excluding file: {os.path.relpath(filename, base_dir)}")
self.log.debug(
"Excluding file: {}".format(os.path.relpath(filename, role_dir))
)

View File

@ -1,7 +0,0 @@
## Requirements
{% if meta | deep_get(meta, "min_ansible_version.value") %}
- Minimum Ansible version: `{{ meta.min_ansible_version.value }}`
{% else %}
None.
{% endif %}

View File

@ -4,8 +4,8 @@
{% for key, item in tag | dictsort %}
{{ key }}
{% if item.description is defined and item.description | safe_join(" ") | striptags %}
: {{ item.description | safe_join(" ") | striptags }}
{% if item.description is defined and item.description | save_join(" ") | striptags %}
: {{ item.description | save_join(" ") | striptags }}
{% else %}
: &nbsp;
{% endif %}

View File

@ -1,13 +1,10 @@
- [Requirements](#requirements)
{% set var = role.var | default({}) %}
{% if var %}
- [Default Variables](#default-variables)
{% if not tabulate_vars %}
{% for key, item in var | dictsort %}
- [{{ key }}](#{{ key }})
{% endfor %}
{% endif %}
{% endif %}
{% if tag %}
- [Discovered Tags](#discovered-tags)
{% endif %}

View File

@ -4,15 +4,15 @@
{% for key, item in todo | dictsort %}
{% for line in item %}
{% if line.value is defined and line.value | safe_join(" ") | striptags and key == "default" %}
- {{ line.value | safe_join(" ") | striptags }}
{% if line.value is defined and line.value | save_join(" ") | striptags and key == "default" %}
- {{ line.value | save_join(" ") | striptags }}
{% endif %}
{% endfor %}
{% endfor %}
{% for key, item in todo | dictsort %}
{% for line in item %}
{% if line.value is defined and line.value | safe_join(" ") | striptags and key != "default" %}
- ({{ key }}): {{ line.value | safe_join(" ") | striptags }}
{% if line.value is defined and line.value | save_join(" ") | striptags and key != "default" %}
- ({{ key }}): {{ line.value | save_join(" ") | striptags }}
{% endif %}
{% endfor %}
{% endfor %}

View File

@ -1,29 +1,13 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% for key, item in var | dictsort %}
### {{ key }}
{% if item.description is defined and item.description %}
{% set description = [item.description] if item.description is string else item.description %}
{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
{% endif %}
{% if item.deprecated is defined or item.type is defined %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n\n", "\n") | safe_join("\n") %}
{% if deprecated_string %}
**_Deprecated:_** {{ deprecated_string }}<br />
{% else %}
**_Deprecated_**<br />
{% endif %}
{% endif %}
{% if item.type is defined and item.type %}
{% set type = [item.type] if item.type is string else item.type %}
**_Type:_** {{ type | map("replace", "\n\n", "\n") | safe_join("\n") }}<br />
{% endif %}
{{ item.description | save_join(" ") }}
{% endif %}
{% if item.value is defined and item.value %}

View File

@ -1,49 +0,0 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key -}}
|{{ (item.value | default({}))[key] | default -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}

View File

@ -1,35 +1,22 @@
{% if not append | deep_get(role, "internal.append") %}
{% set meta = role.meta | default({}) %}
---
title: {{ meta.name.value | safe_join(" ") }}
title: {{ meta.name.value | save_join(" ") }}
type: docs
{% if summary | deep_get(meta, "summary.value") %}
summary: {{ meta.summary.value | safe_join(" ") }}
{% endif %}
---
{% endif %}
{% if description | deep_get(meta, "description.value") %}
{% set description = [meta.description.value] if meta.description.value is string else meta.description.value %}
{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
{{ meta.description.value | save_join(" ") }}
{% endif %}
<!--more-->
{# TOC #}
{% include '_toc.j2' +%}
{# Requirements #}
{% include '_requirements.j2' %}
{% include '_toc.j2' %}
{# Vars #}
{% if tabulate_vars %}
{% include '_vars_tabulated.j2' %}
{% else %}
{% include '_vars.j2' %}
{% endif %}
{# Tag #}
{# Todo #}
{% include '_tag.j2' %}
{# Todo #}

View File

@ -1,27 +1,19 @@
{% if not append | deep_get(role, "internal.append") %}
{% set meta = role.meta | default({}) %}
# {{ meta.name.value | safe_join(" ") }}
# {{ meta.name.value | save_join(" ") }}
{% endif %}
{% if description | deep_get(meta, "description.value") %}
{% set description = [meta.description.value] if meta.description.value is string else meta.description.value %}
{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
{{ meta.description.value | save_join(" ") }}
{% endif %}
{# TOC #}
{% include '_toc.j2' +%}
{# Requirements #}
{% include '_requirements.j2' %}
{% include '_toc.j2' %}
{# Vars #}
{% if tabulate_vars %}
{% include '_vars_tabulated.j2' %}
{% else %}
{% include '_vars.j2' %}
{% endif %}
{# Tag #}
{# Todo #}
{% include '_tag.j2' %}
{# Todo #}

View File

@ -9,9 +9,7 @@
{% set deps = meta.dependencies.value %}
{% endif %}
{% for item in deps %}
{% if item is string or item.role %}
- {{ item if item is string else item.role }}
{% endif %}
- {{ item }}
{% endfor %}
{% else %}
None.
@ -20,12 +18,12 @@ None.
## License
{{ meta.license.value | safe_join(" ") }}
{{ meta.license.value }}
{% endif %}
{% if author | deep_get(meta, "author.value") %}
## Author
{{ meta.author.value | safe_join(" ") }}
{{ meta.author.value | save_join(" ") }}
{% endif %}
{% endif %}

View File

@ -1,7 +0,0 @@
## Requirements
{% if meta | deep_get(meta, "min_ansible_version.value") %}
- Minimum Ansible version: `{{ meta.min_ansible_version.value }}`
{% else %}
None.
{% endif %}

View File

@ -2,11 +2,11 @@
{% if tag %}
## Discovered Tags
{% for key, item in tag | dictsort %}
{% set is_desc = item.description is defined and item.description | safe_join(" ") | striptags %}
{% set is_desc = item.description is defined and item.description | save_join(" ") | striptags %}
**_{{ key }}_**{{ "\\" if is_desc else "" }}
{% if is_desc %}
&emsp;{{ item.description | safe_join(" ") | striptags }}
&emsp;{{ item.description | save_join(" ") | striptags }}
{% endif %}
{% endfor %}
{% endif %}

View File

@ -1,15 +1,12 @@
## Table of content
- [Requirements](#requirements)
{% set var = role.var | default({}) %}
{% if var %}
- [Default Variables](#default-variables)
{% if not tabulate_vars %}
{% for key, item in var | dictsort %}
- [{{ key }}](#{{ key }})
{% endfor %}
{% endif %}
{% endif %}
{% if tag %}
- [Discovered Tags](#discovered-tags)
{% endif %}

View File

@ -4,15 +4,15 @@
{% for key, item in todo | dictsort %}
{% for line in item %}
{% if line.value is defined and line.value | safe_join(" ") | striptags and key == "default" %}
- {{ line.value | safe_join(" ") | striptags }}
{% if line.value is defined and line.value | save_join(" ") | striptags and key == "default" %}
- {{ line.value | save_join(" ") | striptags }}
{% endif %}
{% endfor %}
{% endfor %}
{% for key, item in todo | dictsort %}
{% for line in item %}
{% if line.value is defined and line.value | safe_join(" ") | striptags and key != "default" %}
- ({{ key }}): {{ line.value | safe_join(" ") | striptags }}
{% if line.value is defined and line.value | save_join(" ") | striptags and key != "default" %}
- ({{ key }}): {{ line.value | save_join(" ") | striptags }}
{% endif %}
{% endfor %}
{% endfor %}

View File

@ -1,29 +1,13 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% for key, item in var | dictsort %}
### {{ key }}
{% if item.description is defined and item.description %}
{% set description = [item.description] if item.description is string else item.description %}
{{ description | map("replace", "\n\n", "\n") | safe_join("\n") }}
{% endif %}
{% if item.deprecated is defined or item.type is defined %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n\n", "\n") | safe_join("\n") %}
{% if deprecated_string %}
**_Deprecated:_** {{ deprecated_string }}<br />
{% else %}
**_Deprecated_**<br />
{% endif %}
{% endif %}
{% if item.type is defined and item.type %}
{% set type = [item.type] if item.type is string else item.type %}
**_Type:_** {{ type | map("replace", "\n\n", "\n") | safe_join("\n") }}<br />
{% endif %}
{{ item.description | save_join(" ") }}
{% endif %}
{% if item.value is defined and item.value %}

View File

@ -1,49 +0,0 @@
{% set var = role.var | default({}) %}
{% if var %}
## Default Variables
{% set columns = ["variable", "default", "description", "type", "deprecated", "example"] %}
{% set found_columns = ["variable", "default"] + var.values() | map("list") | sum(start=["key"]) | unique | list %}
{% for c in columns %}
{% if c in found_columns %}
|{{ c | capitalize -}}
{% endif %}
{% endfor %}
|
{% for c in columns %}
{% if c in found_columns %}
|{{ "-" * (c | length) -}}
{% endif %}
{% endfor %}
|
{% for key, item in var | dictsort %}
|{{ key -}}
|{{ (item.value | default({}))[key] | default -}}
{% if "description" in found_columns %}
|{{ item.description | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
{% if "type" in found_columns %}
|{{ item.type | default([]) | join("<br />") -}}
{% endif %}
{% if "deprecated" in found_columns %}
|
{%- if "deprecated" in found_columns %}
{% if item.deprecated is defined %}
{% set deprecated = [item.deprecated] if item.deprecated is string else item.deprecated %}
{% set deprecated_string = deprecated | map("replace", "\n", "<br />") | safe_join("<br />") %}
{% if deprecated_string -%}
{{ deprecated_string }}
{%- else -%}
True
{%- endif %}
{%- else -%}
False
{%- endif %}
{% endif %}
{% endif %}
{% if "example" in found_columns %}
|{{ item.example | default([]) | safe_join("<br />") | replace("\n", "<br />") | replace("|", "\|") -}}
{% endif %}
|
{% endfor %}
{% endif %}

View File

@ -4,41 +4,22 @@
import logging
import os
import sys
from collections.abc import Iterable
from distutils.util import strtobool
import colorama
from pythonjsonlogger import jsonlogger
try:
from typing import Iterable
except ImportError:
from collections import Iterable
import ansibledoctor.exception
CONSOLE_FORMAT = "{}{}[%(levelname)s]{} %(message)s"
JSON_FORMAT = "%(asctime)s %(levelname)s %(message)s"
def strtobool(value):
"""Convert a string representation of truth to true or false."""
_map = {
"y": True,
"yes": True,
"t": True,
"true": True,
"on": True,
"1": True,
"n": False,
"no": False,
"f": False,
"false": False,
"off": False,
"0": False,
}
try:
return _map[str(value).lower()]
except KeyError as err:
raise ValueError(f'"{value}" is not a valid bool value') from err
def to_bool(string):
return bool(strtobool(str(string)))
@ -46,7 +27,8 @@ def to_bool(string):
def flatten(items):
for x in items:
if isinstance(x, Iterable) and not isinstance(x, (str, bytes)):
yield from flatten(x)
for sub_x in flatten(x):
yield sub_x
else:
yield x
@ -59,40 +41,6 @@ def _should_do_markup():
return sys.stdout.isatty() and os.environ.get("TERM") != "dumb"
def _split_string(string, delimiter, escape, maxsplit=None):
result = []
current_element = []
iterator = iter(string)
count_split = 0
skip_split = False
for character in iterator:
if maxsplit and count_split >= maxsplit:
skip_split = True
if character == escape and not skip_split:
try:
next_character = next(iterator)
if next_character != delimiter and next_character != escape:
# Do not copy the escape character if it is intended to escape either the
# delimiter or the escape character itself. Copy the escape character
# if it is not used to escape either of these characters.
current_element.append(escape)
current_element.append(next_character)
count_split += 1
except StopIteration:
current_element.append(escape)
elif character == delimiter and not skip_split:
result.append("".join(current_element))
current_element = []
count_split += 1
else:
current_element.append(character)
result.append("".join(current_element))
return result
colorama.init(autoreset=True, strip=not _should_do_markup())
@ -103,12 +51,12 @@ class Singleton(type):
def __call__(cls, *args, **kwargs):
if cls not in cls._instances:
cls._instances[cls] = super().__call__(*args, **kwargs)
cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instances[cls]
class LogFilter:
"""Exclude log messages above the logged level."""
class LogFilter(object):
"""A custom log filter which excludes log messages above the logged level."""
def __init__(self, level):
"""
@ -126,17 +74,17 @@ class LogFilter:
class MultilineFormatter(logging.Formatter):
"""Reset color after newline characters."""
"""Logging Formatter to reset color after newline characters."""
def format(self, record):
record.msg = record.msg.strip().replace("\n", f"\n{colorama.Style.RESET_ALL}... ")
def format(self, record): # noqa
record.msg = record.msg.replace("\n", "\n{}... ".format(colorama.Style.RESET_ALL))
return logging.Formatter.format(self, record)
class MultilineJsonFormatter(jsonlogger.JsonFormatter):
"""Remove newline characters."""
"""Logging Formatter to remove newline characters."""
def format(self, record):
def format(self, record): # noqa
record.msg = record.msg.replace("\n", " ")
return jsonlogger.JsonFormatter.format(self, record)
@ -144,11 +92,11 @@ class MultilineJsonFormatter(jsonlogger.JsonFormatter):
class Log:
"""Handle logging."""
def __init__(self, level=logging.WARNING, name="ansibledoctor", json=False):
def __init__(self, level=logging.WARN, name="ansibledoctor", json=False):
self.logger = logging.getLogger(name)
self.logger.setLevel(level)
self.logger.addHandler(self._get_error_handler(json=json))
self.logger.addHandler(self._get_warning_handler(json=json))
self.logger.addHandler(self._get_warn_handler(json=json))
self.logger.addHandler(self._get_info_handler(json=json))
self.logger.addHandler(self._get_critical_handler(json=json))
self.logger.addHandler(self._get_debug_handler(json=json))
@ -173,13 +121,13 @@ class Log:
return handler
def _get_warning_handler(self, json=False):
def _get_warn_handler(self, json=False):
handler = logging.StreamHandler(sys.stdout)
handler.setLevel(logging.WARNING)
handler.addFilter(LogFilter(logging.WARNING))
handler.setLevel(logging.WARN)
handler.addFilter(LogFilter(logging.WARN))
handler.setFormatter(
MultilineFormatter(
self.warning(
self.warn(
CONSOLE_FORMAT.format(
colorama.Fore.YELLOW, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
@ -236,7 +184,7 @@ class Log:
handler.addFilter(LogFilter(logging.DEBUG))
handler.setFormatter(
MultilineFormatter(
self.debug(
self.critical(
CONSOLE_FORMAT.format(
colorama.Fore.BLUE, colorama.Style.BRIGHT, colorama.Style.RESET_ALL
)
@ -264,8 +212,8 @@ class Log:
"""Format error messages and return string."""
return msg
def warning(self, msg):
"""Format warning messages and return string."""
def warn(self, msg):
"""Format warn messages and return string."""
return msg
def info(self, msg):
@ -281,13 +229,13 @@ class Log:
:returns: string
"""
return f"{color}{msg}{colorama.Style.RESET_ALL}"
return "{}{}{}".format(color, msg, colorama.Style.RESET_ALL)
def sysexit(self, code=1):
sys.exit(code)
def sysexit_with_message(self, msg, code=1):
self.logger.critical(str(msg).strip())
self.logger.critical(str(msg))
self.sysexit(code)
@ -297,6 +245,19 @@ class SingleLog(Log, metaclass=Singleton):
pass
class UnsafeTag:
"""Handle custom yaml unsafe tag."""
yaml_tag = u"!unsafe"
def __init__(self, value):
self.unsafe = value
@staticmethod
def yaml_constructor(loader, node):
return loader.construct_scalar(node)
class FileUtils:
"""Mics static methods for file handling."""
@ -306,8 +267,7 @@ class FileUtils:
@staticmethod
def query_yes_no(question, default=True):
"""
Ask a yes/no question via input() and return their answer.
"""Ask a yes/no question via input() and return their answer.
"question" is a string that is presented to the user.
"default" is the presumed answer if the user just hits <Enter>.
@ -316,11 +276,14 @@ class FileUtils:
The "answer" return value is one of "yes" or "no".
"""
prompt = "[Y/n]" if default else "[N/y]"
if default:
prompt = "[Y/n]"
else:
prompt = "[N/y]"
try:
# input method is safe in python3
choice = input(f"{question} {prompt} ") or default # nosec
choice = input("{} {} ".format(question, prompt)) or default # nosec
return to_bool(choice)
except (KeyboardInterrupt, ValueError) as e:
raise ansibledoctor.exception.InputError("Error while reading input", e) from e
raise ansibledoctor.exception.InputError("Error while reading input", e)

View File

@ -1,85 +0,0 @@
"""Utils for YAML file operations."""
from collections import defaultdict
from contextlib import suppress
import ruamel.yaml
from ansible.parsing.yaml.loader import AnsibleLoader
import ansibledoctor.exception
class UnsafeTag:
"""Handle custom yaml unsafe tag."""
yaml_tag = "!unsafe"
def __init__(self, value):
self.unsafe = value
@staticmethod
def yaml_constructor(loader, node):
return loader.construct_scalar(node)
def parse_yaml_ansible(yamlfile):
try:
loader = AnsibleLoader(yamlfile)
data = loader.get_single_data() or []
except (
ruamel.yaml.parser.ParserError,
ruamel.yaml.scanner.ScannerError,
ruamel.yaml.constructor.ConstructorError,
ruamel.yaml.composer.ComposerError,
) as e:
message = (
f"{e.context} in line {e.context_mark.line}, column {e.context_mark.line}\n"
f"{e.problem} in line {e.problem_mark.line}, column {e.problem_mark.column}"
)
raise ansibledoctor.exception.YAMLError(message) from e
return data
def parse_yaml(yamlfile):
try:
ruamel.yaml.add_constructor(
UnsafeTag.yaml_tag,
UnsafeTag.yaml_constructor,
constructor=ruamel.yaml.SafeConstructor,
)
data = ruamel.yaml.YAML(typ="rt").load(yamlfile)
_yaml_remove_comments(data)
data = defaultdict(dict, data or {})
except (
ruamel.yaml.parser.ParserError,
ruamel.yaml.scanner.ScannerError,
ruamel.yaml.constructor.ConstructorError,
ruamel.yaml.composer.ComposerError,
) as e:
message = (
f"{e.context} in line {e.context_mark.line}, column {e.context_mark.line}\n"
f"{e.problem} in line {e.problem_mark.line}, column {e.problem_mark.column}"
)
raise ansibledoctor.exception.YAMLError(message) from e
return data
def _yaml_remove_comments(d):
if isinstance(d, dict):
for k, v in d.items():
_yaml_remove_comments(k)
_yaml_remove_comments(v)
elif isinstance(d, list):
for elem in d:
_yaml_remove_comments(elem)
with suppress(AttributeError):
attr = (
"comment"
if isinstance(d, ruamel.yaml.scalarstring.ScalarString)
else ruamel.yaml.comments.Comment.attrib
)
delattr(d, attr)

View File

@ -1,4 +1,4 @@
FROM python:3.12-alpine@sha256:5365725a6cd59b72a927628fdda9965103e3dc671676c89ef3ed8b8b0e22e812
FROM python:3.10-alpine@sha256:9316f0d151250a0b5a6c6bc26ed11f7e1cb29e856fa0da48fa9d084a3c67d46d
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
@ -14,7 +14,7 @@ ADD dist/ansible_doctor-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl")[ansible-core] && \
pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
rm -f ansible_doctor-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/

24
docker/Dockerfile.arm Normal file
View File

@ -0,0 +1,24 @@
FROM arm32v7/python:3.10-alpine@sha256:b927a8af106c63d29d646c8e22f6a318cd350cf5e4f7d6dc49817b6e6aa1636e
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-doctor"
LABEL org.opencontainers.image.url="https://ansible-doctor.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-doctor"
LABEL org.opencontainers.image.documentation="https://ansible-doctor.geekdocs.de/"
ENV PY_COLORS=1
ENV TZ=UTC
ADD dist/ansible_doctor-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
rm -f ansible_doctor-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-doctor"]

24
docker/Dockerfile.arm64 Normal file
View File

@ -0,0 +1,24 @@
FROM arm64v8/python:3.10-alpine@sha256:289e6bac44dac26276f81db95eebefa81fb758f22504c0a45f6da8e99af854dc
LABEL maintainer="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.authors="Robert Kaussow <mail@thegeeklab.de>"
LABEL org.opencontainers.image.title="ansible-doctor"
LABEL org.opencontainers.image.url="https://ansible-doctor.geekdocs.de/"
LABEL org.opencontainers.image.source="https://github.com/thegeeklab/ansible-doctor"
LABEL org.opencontainers.image.documentation="https://ansible-doctor.geekdocs.de/"
ENV PY_COLORS=1
ENV TZ=UTC
ADD dist/ansible_doctor-*.whl /
RUN apk --update add --virtual .build-deps build-base libffi-dev openssl-dev && \
pip install --upgrade --no-cache-dir pip && \
pip install --no-cache-dir $(find / -name "ansible_doctor-*.whl") && \
rm -f ansible_doctor-*.whl && \
rm -rf /var/cache/apk/* && \
rm -rf /root/.cache/
USER root
CMD []
ENTRYPOINT ["/usr/local/bin/ansible-doctor"]

24
docker/manifest-quay.tmpl Normal file
View File

@ -0,0 +1,24 @@
image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: quay.io/thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7

24
docker/manifest.tmpl Normal file
View File

@ -0,0 +1,24 @@
image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}{{else}}latest{{/if}}
{{#if build.tags}}
tags:
{{#each build.tags}}
- {{this}}
{{/each}}
{{/if}}
manifests:
- image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}amd64
platform:
architecture: amd64
os: linux
- image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm64
platform:
architecture: arm64
os: linux
variant: v8
- image: thegeeklab/ansible-doctor:{{#if build.tag}}{{trimPrefix "v" build.tag}}-{{/if}}arm
platform:
architecture: arm
os: linux
variant: v7

View File

@ -2,7 +2,7 @@
title: Documentation
---
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
[![Docker Hub](https://img.shields.io/badge/dockerhub-latest-blue.svg?logo=docker&logoColor=white)](https://hub.docker.com/r/thegeeklab/ansible-doctor)
[![Quay.io](https://img.shields.io/badge/quay-latest-blue.svg?logo=docker&logoColor=white)](https://quay.io/repository/thegeeklab/ansible-doctor)
[![Python Version](https://img.shields.io/pypi/pyversions/ansible-doctor.svg)](https://pypi.org/project/ansible-doctor/)

View File

@ -4,9 +4,10 @@ title: Using docker
```Shell
docker run \
-e ANSIBLE_DOCTOR_BASE_DIR=example/demo-role/ \
-e ANSIBLE_DOCTOR_ROLE_DIR=example/demo-role/ \
-e ANSIBLE_DOCTOR_OUTPUT_DIR=example/ \
-e ANSIBLE_DOCTOR_FORCE_OVERWRITE=true \
-e ANSIBLE_DOCTOR_CUSTOM_HEADER=HEADER.md \
-e ANSIBLE_DOCTOR_CUSTOM_HEADER=example/demo-role/HEADER.md \
-e ANSIBLE_DOCTOR_LOG_LEVEL=info \
-e PY_COLORS=1 \
-v $(pwd):/doctor \
@ -14,6 +15,7 @@ docker run \
thegeeklab/ansible-doctor
```
{{< hint type=note >}}
{{< hint info >}}
**Info**\
Keep in mind, that SELinux labels (`:Z` or `:z`) need to be passed as mount option on SELinux enabled systems.
{{< /hint >}}

View File

@ -4,12 +4,11 @@ title: Using pip
```Shell
# From PyPI as unprivileged user
$ pip install ansible-doctor[ansible-core] --user
$ pip install ansible-doctor --user
# .. or as root
$ sudo pip install ansible-doctor[ansible-core]
$ sudo pip install ansible-doctor
# From Wheel file
# Please check first whether a newer version is available.
$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v3.1.4/ansible_doctor-3.1.4-py2.py3-none-any.whl[ansible-core]
$ pip install https://github.com/thegeeklab/ansible-doctor/releases/download/v0.1.1/ansible_doctor-0.1.1-py2.py3-none-any.whl
```

View File

@ -18,40 +18,33 @@ Configuration options can be set in different places, which are processed in the
```YAML
---
# Default is the current working directory.
base_dir:
# Default is the basename of 'role_name'.
# default is the current working directory
role_dir:
# default is the basename of 'role_name'
role_name:
# Auto-detect if the given directory is a role, can be disabled
# to parse loose files instead.
role_detection: True
# Don't write anything to file system
# don't write anything to file system
dry_run: False
logging:
# Possible options debug | info | warning | error | critical
# possible options debug | info | warning | error | critical
level: "warning"
# Json logging can be enabled if a parsable output is required
# json logging can be enabled if a parsable output is required
json: False
# Path to write rendered template file. Default is the current working directory.
# path to write rendered template file
# default is the current working directory
output_dir:
# Default is built-in templates directory.
# default is in-build templates directory
template_dir:
template: readme
# By default, double spaces, spaces before and after line breaks or tab characters, etc.
# are automatically removed before the template is rendered. As a result, indenting
# with spaces does not work. If you want to use spaces to indent text, you must disable
# this option.
template_autotrim: True
# Configures whether to tabulate variables in the output. When set to `True`,
# variables will be displayed in a tabular format instead of plain Markdown sections.
# NOTE: This option does not support rendering multiline code blocks.
tabulate_variables: False
# Don't ask to overwrite if output file exists.
# don't ask to overwrite if output file exists
force_overwrite: False
# Load custom header from given file and append template output to it before write.
# load custom header from given file and append template output
# to it before write.
custom_header: ""
exclude_files: []
@ -59,30 +52,25 @@ exclude_files: []
# exclude_files:
# - molecule/
# - files/**/*.py
# Exclude tags from automatic detection. Configured tags are only skipped
# if the tag is not used in an annotation.
exclude_tags: []
```
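For illustration, a hedged sketch of how the exclude options can be combined. The `exclude_files` patterns are taken from the comments above; the `exclude_tags` values are only examples, not defaults:

```YAML
# Skip whole directories or glob patterns when scanning the role.
exclude_files:
  - molecule/
  - files/**/*.py
# Skip auto-discovered tags unless they are explicitly annotated.
exclude_tags:
  - always
  - never
```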
## CLI
```Shell
$ ansible-doctor --help
usage: ansible-doctor [-h] [-c CONFIG_FILE] [-o OUTPUT_DIR] [-r] [-f] [-d] [-n] [-v] [-q] [--version] [base_dir]
usage: ansible-doctor [-h] [-c CONFIG_FILE] [-o OUTPUT_DIR] [-f] [-d] [-n] [-v] [-q] [--version] [role_dir]
Generate documentation from annotated Ansible roles using templates
positional arguments:
base_dir base directory (default: current working directory)
role_dir role directory (default: current working dir)
options:
optional arguments:
-h, --help show this help message and exit
-c CONFIG_FILE, --config CONFIG_FILE
path to configuration file
location of configuration file
-o OUTPUT_DIR, --output OUTPUT_DIR
output directory
-r, --recursive run recursively over the base directory subfolders
output base dir
-f, --force force overwrite output file
-d, --dry-run dry run without writing
-n, --no-role-detection
@ -97,8 +85,7 @@ options:
```Shell
ANSIBLE_DOCTOR_CONFIG_FILE=
ANSIBLE_DOCTOR_ROLE_DETECTION=true
ANSIBLE_DOCTOR_BASE_DIR=
ANSIBLE_DOCTOR_RECURSIVE=false
ANSIBLE_DOCTOR_ROLE_DIR=
ANSIBLE_DOCTOR_ROLE_NAME=
ANSIBLE_DOCTOR_DRY_RUN=false
ANSIBLE_DOCTOR_LOG_LEVEL=warning
@ -106,30 +93,8 @@ ANSIBLE_DOCTOR_LOG_JSON=false
ANSIBLE_DOCTOR_OUTPUT_DIR=
ANSIBLE_DOCTOR_TEMPLATE_DIR=
ANSIBLE_DOCTOR_TEMPLATE=readme
ANSIBLE_DOCTOR_TEMPLATE_AUTOTRIM=true
ANSIBLE_DOCTOR_TABULATE_VARIABLES=false
ANSIBLE_DOCTOR_FORCE_OVERWRITE=false
ANSIBLE_DOCTOR_CUSTOM_HEADER=
ANSIBLE_DOCTOR_EXCLUDE_FILES=
ANSIBLE_DOCTOR_EXCLUDE_FILES=molecule/,files/**/*.py
```
## Pre-Commit setup
To use _ansible-doctor_ with the [pre-commit](https://pre-commit.com/) framework, add the following to the `.pre-commit-config.yaml` file in your local repository.
<!-- prettier-ignore-start -->
<!-- markdownlint-disable -->
<!-- spellchecker-disable -->
{{< highlight yaml "linenos=table" >}}
- repo: https://github.com/thegeeklab/ansible-doctor
# update version with `pre-commit autoupdate`
rev: v4.0.4
hooks:
- id: ansible-doctor
{{< /highlight >}}
<!-- spellchecker-enable -->
<!-- markdownlint-restore -->
<!-- prettier-ignore-end -->

View File

@ -12,7 +12,7 @@ ansible-doctor FOLDER
If no folder is passed to _ansible-doctor_, the current working directory is used. The first step is to determine whether the specified folder is an Ansible role. This check is very simple and only verifies that there is a sub-directory named `tasks` in the specified folder. After a successful check, _ansible-doctor_ registers all files of the role and searches them for annotations.
Without any further work, _ansible-doctor_ can already create documentation of the available variables and some meta information, if the role contains any. This basic information can be extended with a set of available annotations. If you want to see it in action, you can find a [demo role](https://github.com/thegeeklab/ansible-doctor/tree/main/example) with a lot of examples in the repository.
Without any further work, _ansible-doctor_ can already create documentation of the available variables and some meta information if the role contains [meta information](https://galaxy.ansible.com/docs/contributing/creating_role.html#role-metadata). This basic information can be extended with a set of available annotations.
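For illustration, a minimal `defaults/main.yml` sketch (the variable names are invented for this example). The plain variable is picked up automatically, while the annotated one gets an additional description in the generated output:

```yaml
---
# Collected automatically, even without an annotation.
demo_example_port: 8080

# @var demo_example_password:description: Password used to log in to the demo service.
demo_example_password: "secret"
```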
## Annotations
@ -33,7 +33,7 @@ option1
# the default description with an annotation.
# @end
# @meta author:value: [John Doe](https://blog.example.com)
# @meta author: [John Doe](https://blog.example.com)
```
### `@var`
@ -44,28 +44,16 @@ option1
: the name of the variable to which additional information should be added
option2
: supports `["value", "example", "description", "type", "deprecated"]` as information scopes
: supports `["value", "example", "description"]` as information scopes
#### `value`
**Example:**
```yaml
# @var docker_registry_password:value: $ "secret"
docker_registry_password: "secret"
```
```YAML
# @var docker_registry_password:value: "secure_overwrite"
# @var docker_registry_password: "secure_overwrite"
#### `example`
# @var docker_registry_password:example: "%8gv_5GA?"
```yaml
# @var docker_registry_password:example: $ "randomPassw0rd"
# @var docker_registry_password:example: >
# docker_registry_password: "randomPassw0rd"
# @end
docker_registry_password: "secret"
```
#### `description`
```yaml
# @var docker_registry_password:description: Very secure password to login to the docker registry.
# @var docker_registry_password:description: >
#   Multi-line descriptions are possible as well.
@ -74,21 +62,6 @@ docker_registry_password: "secret"
docker_registry_password: "secret"
```
#### `type`
```yaml
# @var docker_registry_password:type: string
docker_registry_password: "secret"
```
#### `deprecated`
```yaml
# @var docker_registry_password:deprecated: true
# @var docker_registry_password:deprecated: since v1.0.0
docker_registry_password: "secret"
```
### `@tag`
Used tags within the Ansible task files will be auto-discovered. This identifier can be used to define tags manually or add extended information to discovered tags.
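A hedged sketch of a manually defined tag, assuming `@tag` follows the same `name:scope:` annotation form as `@var` (the tag name and task are made up):

```yaml
---
# @tag custom-tag:description: Tag added manually to the generated documentation.
- name: Demo task
  debug:
    msg: "Demo message"
  tags:
    - custom-tag
```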

File diff suppressed because one or more lines are too long

Binary image (20 KiB before); file not shown.

Binary image changed (30 KiB before -> 37 KiB after).

View File

@ -1,23 +1,17 @@
# demo-role-custom-header
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
Role to demonstrate ansible-doctor. It is also possible to overwrite
the default description with an annotation.
Role to demonstrate ansible-doctor. It is also possible to overwrite the default description with an annotation.
## Table of content
- [Requirements](#requirements)
- [Default Variables](#default-variables)
- [demo_role_deprecated](#demo_role_deprecated)
- [demo_role_deprecated_info](#demo_role_deprecated_info)
- [demo_role_dict](#demo_role_dict)
- [demo_role_empty](#demo_role_empty)
- [demo_role_empty_dict](#demo_role_empty_dict)
- [demo_role_other_tags](#demo_role_other_tags)
- [demo_role_override](#demo_role_override)
- [demo_role_override_complex](#demo_role_override_complex)
- [demo_role_single](#demo_role_single)
- [demo_role_undefined_var](#demo_role_undefined_var)
- [demo_role_unset](#demo_role_unset)
@ -29,33 +23,8 @@ the default description with an annotation.
---
## Requirements
- Minimum Ansible version: `2.10`
## Default Variables
### demo_role_deprecated
**_Deprecated_**<br />
#### Default value
```YAML
demo_role_deprecated: b
```
### demo_role_deprecated_info
**_Deprecated:_** This variable is deprecated since `v2.0.0` and will be removed in a future release.<br />
**_Type:_** string<br />
#### Default value
```YAML
demo_role_deprecated_info: a
```
### demo_role_dict
#### Default value
@ -90,8 +59,7 @@ demo_role_empty: ''
### demo_role_empty_dict
... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key
and filters like `to_nice_yaml` can be used in templates. To get it working, the json needs to be prefixed with a `$`.
... or valid json can be used. In this case, the json will be automatically prefixed with the annotation key and filters like `to_nice_yaml` can be used in templates. To get it working, the json needs to be prefixed with a `$`.
#### Default value
@ -129,24 +97,6 @@ demo_role_other_tags:
- package2
```
### demo_role_override
#### Default value
```YAML
demo_role_override: test
```
### demo_role_override_complex
#### Default value
```YAML
demo_role_override_complex:
foo: bar
second: value
```
### demo_role_single
#### Default value
@ -157,12 +107,7 @@ demo_role_single: b
### demo_role_undefined_var
To highlight a variable that has no value set by default, this is one way to achieve it.
Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
| Attribute | Description |
| --- | --- |
| value1 | desc1 |
To highlight a variable that has no value set by default, this is one way to achieve it. Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
#### Default value
@ -204,7 +149,7 @@ demo_role_unset: some_value
## Dependencies
- role2
None.
## License
@ -212,4 +157,4 @@ MIT
## Author
[John Doe](https://blog.example.com)
John Doe

View File

@ -1,4 +1,4 @@
# demo-role-custom-header
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![Build Status](https://img.shields.io/drone/build/thegeeklab/ansible-doctor?logo=drone&server=https%3A%2F%2Fdrone.thegeeklab.de)](https://drone.thegeeklab.de/thegeeklab/ansible-doctor)
[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)

View File

@ -31,11 +31,6 @@ demo_role_dict:
# @var demo_role_undefined_var:description: >
# To highlight a variable that has no value set by default, this is one way to achieve it.
# Make sure to flag it as json value: `@var demo_role_undefined_var: $ "_unset_"`
#
# | Attribute | Description |
# | --- | --- |
# | value1 | desc1 |
#
# @end
# @var demo_role_undefined_var: $ "_unset_"
@ -49,19 +44,3 @@ demo_role_dict:
# ]
# @end
demo_role_other_tags: []
## Simple value
# @var demo_role_override: $ "test"
demo_role_override: original
## Complex value
# @var demo_role_override_complex:value: $ {"foo":"bar", "second":"value"}
demo_role_override_complex: {}
# @var demo_role_deprecated:deprecated:
demo_role_deprecated: "b"
# @var demo_role_deprecated_info:deprecated: >
# This variable is deprecated since `v2.0.0` and will be removed in a future release.
# @var demo_role_deprecated_info:type: string
demo_role_deprecated_info: "a"

View File

@ -3,20 +3,17 @@
# Role to demonstrate ansible-doctor. It is also possible to overwrite
# the default description with an annotation.
# @end
# @meta author: [John Doe](https\://blog.example.com)
# @meta author: [John Doe](https://blog.example.com)
galaxy_info:
description: Role to demonstrate ansible-doctor.
author: John Doe
license: MIT
min_ansible_version: "2.10"
min_ansible_version: 2.4
platforms:
- name: EL
versions:
- "9"
- 7
galaxy_tags:
- demo
- documentation
dependencies:
- role: role2
- name: namespace.role3
dependencies: []

View File

@ -11,8 +11,6 @@
- name: Demo task with a tag list
debug:
msg: "Demo message"
tags:
- module-tag
tags:
- role-tag1
- role-tag2

View File

@ -1,5 +0,0 @@
---
custom_header: HEADER.md
logging:
level: debug
template: readme

View File

@ -1,4 +0,0 @@
# other-role-custom-header
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)

View File

@ -1,67 +0,0 @@
# other-role-custom-header
[![Build Status](https://ci.thegeeklab.de/api/badges/thegeeklab/ansible-doctor/status.svg)](https://ci.thegeeklab.de/repos/thegeeklab/ansible-doctor)
[![License: GPL-3.0](https://img.shields.io/github/license/thegeeklab/ansible-doctor)](https://github.com/thegeeklab/ansible-doctor/blob/main/LICENSE)
Role to demonstrate ansible-doctor.
## Table of content
- [Requirements](#requirements)
- [Default Variables](#default-variables)
- [demo_role_unset](#demo_role_unset)
- [Discovered Tags](#discovered-tags)
- [Open Tasks](#open-tasks)
- [Dependencies](#dependencies)
- [License](#license)
- [Author](#author)
---
## Requirements
- Minimum Ansible version: `2.10`
## Default Variables
### demo_role_unset
Values can be plain strings, but there is no magic or autoformatting...
#### Default value
```YAML
demo_role_unset:
```
#### Example usage
```YAML
demo_role_unset: some_value
```
## Discovered Tags
**_role-tag1_**
**_role-tag2_**
## Open Tasks
- Unscoped general todo.
- (bug): Some bug that is known and needs to be fixed.
- (bug): Multi-line descriptions are possible as well. Some bug that is known and needs to be fixed.
- (improvement): Some things that need to be improved.
## Dependencies
- role1
- role2
## License
MIT
## Author
[John Doe](https://blog.example.com)

View File

@ -1,4 +0,0 @@
---
# @var demo_role_unset:description: Values can be plain strings, but there is no magic or autoformatting...
# @var demo_role_unset:example: demo_role_unset: some_value
demo_role_unset:

View File

@ -1,19 +0,0 @@
---
# @meta author: [John Doe](https\://blog.example.com)
galaxy_info:
description: Role to demonstrate ansible-doctor.
author: John Doe
license: MIT
min_ansible_version: "2.10"
platforms:
- name: EL
versions:
- "9"
galaxy_tags:
- demo
- documentation
dependencies:
- role1
- role: role2
- name: namespace.role3

View File

@ -1,16 +0,0 @@
---
# @todo bug: Some bug that is known and needs to be fixed.
# @todo bug: >
#   Multi-line descriptions are possible as well.
#   Some bug that is known and needs to be fixed.
# @end
# @todo improvement: Some things that need to be improved.
# @todo default: Unscoped general todo.
- name: Demo task with a tag list
debug:
msg: "Demo message"
tags:
- role-tag1
- role-tag2

1619
poetry.lock generated

File diff suppressed because it is too large.

View File

@ -10,10 +10,10 @@ classifiers = [
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Utilities",
"Topic :: Software Development",
"Topic :: Software Development :: Documentation",
@ -21,53 +21,72 @@ classifiers = [
description = "Generate documentation from annotated Ansible roles using templates."
documentation = "https://ansible-doctor.geekdocs.de/"
homepage = "https://ansible-doctor.geekdocs.de/"
include = ["LICENSE"]
include = [
"LICENSE",
]
keywords = ["ansible", "role", "documentation"]
license = "GPL-3.0-only"
name = "ansible-doctor"
packages = [{ include = "ansibledoctor" }]
packages = [
{include = "ansibledoctor"},
]
readme = "README.md"
repository = "https://github.com/thegeeklab/ansible-doctor/"
version = "0.0.0"
[tool.poetry.dependencies]
Jinja2 = "3.1.4"
anyconfig = "0.14.0"
Jinja2 = "3.1.1"
anyconfig = "0.13.0"
appdirs = "1.4.4"
colorama = "0.4.6"
environs = "11.0.0"
jsonschema = "4.22.0"
pathspec = "0.12.1"
python = "^3.9.0"
python-json-logger = "2.0.7"
"ruamel.yaml" = "0.18.6"
ansible-core = { version = "2.14.17", optional = true }
colorama = "0.4.4"
environs = "9.5.0"
jsonschema = "4.4.0"
nested-lookup = "0.2.23"
pathspec = "0.9.0"
python = "^3.7.0"
python-json-logger = "2.0.2"
"ruamel.yaml" = "0.17.21"
[tool.poetry.extras]
ansible-core = ["ansible-core"]
[tool.poetry.dev-dependencies]
bandit = "1.7.4"
flake8 = "4.0.1"
flake8-blind-except = "0.2.1"
flake8-builtins = "1.5.3"
flake8-docstrings = "1.6.0"
flake8-eradicate = "1.2.0"
flake8-isort = "4.1.1"
flake8-logging-format = "0.6.0"
flake8-pep3101 = "1.3.0"
flake8-polyfill = "1.0.2"
flake8-quotes = "3.3.1"
pep8-naming = "0.12.1"
pydocstyle = "6.1.1"
pytest = "7.1.1"
pytest-cov = "3.0.0"
pytest-mock = "3.7.0"
yapf = "0.32.0"
toml = "0.10.2"
[tool.poetry.scripts]
ansible-doctor = "ansibledoctor.cli:main"
[tool.poetry.group.dev.dependencies]
ruff = "0.4.5"
pytest = "8.2.1"
pytest-mock = "3.14.0"
pytest-cov = "5.0.0"
toml = "0.10.2"
j2lint = "1.1.0"
[tool.poetry-dynamic-versioning]
enable = true
style = "semver"
vcs = "git"
[tool.isort]
default_section = "THIRDPARTY"
force_single_line = true
line_length = 99
sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
skip_glob = ["**/.env*", "**/env/*", "**/.venv/*", "**/docs/*"]
[tool.pytest.ini_options]
addopts = "ansibledoctor --cov=ansibledoctor --cov-report=xml:coverage.xml --cov-report=term --no-cov-on-fail"
addopts = "ansibledoctor --cov=ansibledoctor --cov-report=xml:coverage.xml --cov-report=term --cov-append --no-cov-on-fail"
filterwarnings = [
"ignore::FutureWarning",
"ignore::DeprecationWarning",
"ignore:.*collections.*:DeprecationWarning",
"ignore:.*pep8.*:FutureWarning",
]
@ -75,70 +94,5 @@ filterwarnings = [
omit = ["**/test/*"]
[build-system]
build-backend = "poetry_dynamic_versioning.backend"
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core>=1.0.0", "poetry-dynamic-versioning"]
[tool.ruff]
exclude = [
".git",
"__pycache__",
"build",
"dist",
"test",
"*.pyc",
"*.egg-info",
".cache",
".eggs",
"env*",
]
line-length = 99
indent-width = 4
[tool.ruff.lint]
# Explanation of errors
#
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# D203: One blank line required before class docstring
# D212: Multi-line docstring summary should start at the first line
ignore = [
"D102",
"D103",
"D105",
"D107",
"D202",
"D203",
"D212",
"UP038",
"RUF012",
]
select = [
"D",
"E",
"F",
"Q",
"W",
"I",
"S",
"BLE",
"N",
"UP",
"B",
"A",
"C4",
"T20",
"SIM",
"RET",
"ARG",
"ERA",
"RUF",
]
[tool.ruff.format]
quote-style = "double"
indent-style = "space"
line-ending = "lf"

View File

@ -1,17 +1,4 @@
{
"$schema": "https://docs.renovatebot.com/renovate-schema.json",
"extends": ["github>thegeeklab/renovate-presets"],
"packageRules": [
{
"description": "Ansible base dependencies",
"matchPackageNames": ["ansible-core"],
"separateMinorPatch": true
},
{
"matchManagers": ["woodpecker"],
"matchFileNames": [".woodpecker/test.yml"],
"matchPackageNames": ["docker.io/library/python"],
"enabled": false
}
]
"extends": ["github>thegeeklab/renovate-presets"]
}

20
setup.cfg Normal file
View File

@ -0,0 +1,20 @@
[flake8]
# Explanation of errors
#
# D102: Missing docstring in public method
# D103: Missing docstring in public function
# D105: Missing docstring in magic method
# D107: Missing docstring in __init__
# D202: No blank lines allowed after function docstring
# W503:Line break occurred before a binary operator
ignore = D102, D103, D105, D107, D202, W503
max-line-length = 99
inline-quotes = double
exclude = .git, __pycache__, build, dist, test, *.pyc, *.egg-info, .cache, .eggs, env*
[yapf]
based_on_style = google
column_limit = 99
dedent_closing_brackets = true
coalesce_brackets = true
split_before_logical_operator = true